   1/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_x86.h"
18
  19#include <limits>
20
  21#include "arch/x86/instruction_set_features_x86.h"
  22#include "art_method.h"
  23#include "base/bit_utils.h"
  24#include "code_generator_x86.h"
25#include "entrypoints/quick/quick_entrypoints.h"
26#include "intrinsics.h"
  27#include "intrinsics_utils.h"
  28#include "mirror/array-inl.h"
  29#include "mirror/string.h"
30#include "thread.h"
31#include "utils/x86/assembler_x86.h"
32#include "utils/x86/constants_x86.h"
33
34namespace art {
35
36namespace x86 {
37
38static constexpr int kDoubleNaNHigh = 0x7FF80000;
39static constexpr int kDoubleNaNLow = 0x00000000;
  40static constexpr int64_t kDoubleNaN = INT64_C(0x7FF8000000000000);
41static constexpr int32_t kFloatNaN = INT32_C(0x7FC00000);
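// Note: these are the canonical quiet-NaN bit patterns (sign bit clear, all exponent
// bits set, top mantissa bit set), i.e. the values returned by
// Double.doubleToLongBits(NaN) and Float.floatToIntBits(NaN).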
  42
  43IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
  44  : arena_(codegen->GetGraph()->GetArena()),
  45    codegen_(codegen) {
  46}
47
48
  49X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  50  return down_cast<X86Assembler*>(codegen_->GetAssembler());
  51}
52
53ArenaAllocator* IntrinsicCodeGeneratorX86::GetAllocator() {
54 return codegen_->GetGraph()->GetArena();
55}
56
57bool IntrinsicLocationsBuilderX86::TryDispatch(HInvoke* invoke) {
58 Dispatch(invoke);
59 LocationSummary* res = invoke->GetLocations();
  60  if (res == nullptr) {
61 return false;
62 }
  63  return res->Intrinsified();
  64}
65
  66static void MoveArguments(HInvoke* invoke, CodeGeneratorX86* codegen) {
  67  InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
  68  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
  69}
70
  71using IntrinsicSlowPathX86 = IntrinsicSlowPath<InvokeDexCallingConventionVisitorX86>;
  72
  73// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
74#define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> // NOLINT
75
76// Slow path implementing the SystemArrayCopy intrinsic copy loop with read barriers.
77class ReadBarrierSystemArrayCopySlowPathX86 : public SlowPathCode {
78 public:
79 explicit ReadBarrierSystemArrayCopySlowPathX86(HInstruction* instruction)
80 : SlowPathCode(instruction) {
81 DCHECK(kEmitCompilerReadBarrier);
82 DCHECK(kUseBakerReadBarrier);
83 }
84
85 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
86 CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
87 LocationSummary* locations = instruction_->GetLocations();
88 DCHECK(locations->CanCall());
89 DCHECK(instruction_->IsInvokeStaticOrDirect())
90 << "Unexpected instruction in read barrier arraycopy slow path: "
91 << instruction_->DebugName();
92 DCHECK(instruction_->GetLocations()->Intrinsified());
93 DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kSystemArrayCopy);
94
95 int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
96 uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();
97
98 Register src = locations->InAt(0).AsRegister<Register>();
99 Location src_pos = locations->InAt(1);
100 Register dest = locations->InAt(2).AsRegister<Register>();
101 Location dest_pos = locations->InAt(3);
102 Location length = locations->InAt(4);
103 Location temp1_loc = locations->GetTemp(0);
104 Register temp1 = temp1_loc.AsRegister<Register>();
105 Register temp2 = locations->GetTemp(1).AsRegister<Register>();
106 Register temp3 = locations->GetTemp(2).AsRegister<Register>();
107
108 __ Bind(GetEntryLabel());
 109    // In this code path, registers `temp1`, `temp2`, and `temp3`
 110    // are not used for the base source address, the base
 111    // destination address, and the end source address (respectively),
 112    // as they are in other SystemArrayCopy intrinsic code paths.
 113    // Instead they are used, respectively, for:
 114    // - the loop index (`i`);
 115    // - the source index (`src_index`) and the loaded (source)
 116    //   reference (`value`); and
 117    // - the destination index (`dest_index`).
118
119 // i = 0
120 __ xorl(temp1, temp1);
121 NearLabel loop;
122 __ Bind(&loop);
123 // value = src_array[i + src_pos]
124 if (src_pos.IsConstant()) {
125 int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
126 int32_t adjusted_offset = offset + constant * element_size;
127 __ movl(temp2, Address(src, temp1, ScaleFactor::TIMES_4, adjusted_offset));
128 } else {
129 __ leal(temp2, Address(src_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0));
130 __ movl(temp2, Address(src, temp2, ScaleFactor::TIMES_4, offset));
131 }
132 __ MaybeUnpoisonHeapReference(temp2);
133 // TODO: Inline the mark bit check before calling the runtime?
134 // value = ReadBarrier::Mark(value)
135 // No need to save live registers; it's taken care of by the
136 // entrypoint. Also, there is no need to update the stack mask,
137 // as this runtime call will not trigger a garbage collection.
138 // (See ReadBarrierMarkSlowPathX86::EmitNativeCode for more
139 // explanations.)
140 DCHECK_NE(temp2, ESP);
141 DCHECK(0 <= temp2 && temp2 < kNumberOfCpuRegisters) << temp2;
142 int32_t entry_point_offset =
143 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86PointerSize>(temp2);
144 // This runtime call does not require a stack map.
145 x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
146 __ MaybePoisonHeapReference(temp2);
147 // dest_array[i + dest_pos] = value
148 if (dest_pos.IsConstant()) {
149 int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
150 int32_t adjusted_offset = offset + constant * element_size;
151 __ movl(Address(dest, temp1, ScaleFactor::TIMES_4, adjusted_offset), temp2);
152 } else {
153 __ leal(temp3, Address(dest_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0));
154 __ movl(Address(dest, temp3, ScaleFactor::TIMES_4, offset), temp2);
155 }
156 // ++i
157 __ addl(temp1, Immediate(1));
158 // if (i != length) goto loop
159 x86_codegen->GenerateIntCompare(temp1_loc, length);
160 __ j(kNotEqual, &loop);
161 __ jmp(GetExitLabel());
162 }
163
164 const char* GetDescription() const OVERRIDE { return "ReadBarrierSystemArrayCopySlowPathX86"; }
165
166 private:
167 DISALLOW_COPY_AND_ASSIGN(ReadBarrierSystemArrayCopySlowPathX86);
168};
169
170#undef __
171
 172#define __ assembler->
173
174static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
175 LocationSummary* locations = new (arena) LocationSummary(invoke,
176 LocationSummary::kNoCall,
177 kIntrinsified);
178 locations->SetInAt(0, Location::RequiresFpuRegister());
179 locations->SetOut(Location::RequiresRegister());
180 if (is64bit) {
181 locations->AddTemp(Location::RequiresFpuRegister());
182 }
183}
184
185static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
186 LocationSummary* locations = new (arena) LocationSummary(invoke,
187 LocationSummary::kNoCall,
188 kIntrinsified);
189 locations->SetInAt(0, Location::RequiresRegister());
190 locations->SetOut(Location::RequiresFpuRegister());
191 if (is64bit) {
192 locations->AddTemp(Location::RequiresFpuRegister());
193 locations->AddTemp(Location::RequiresFpuRegister());
194 }
195}
196
197static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
198 Location input = locations->InAt(0);
199 Location output = locations->Out();
200 if (is64bit) {
201 // Need to use the temporary.
202 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
203 __ movsd(temp, input.AsFpuRegister<XmmRegister>());
204 __ movd(output.AsRegisterPairLow<Register>(), temp);
205 __ psrlq(temp, Immediate(32));
206 __ movd(output.AsRegisterPairHigh<Register>(), temp);
207 } else {
208 __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());
209 }
210}
211
212static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
213 Location input = locations->InAt(0);
214 Location output = locations->Out();
215 if (is64bit) {
216 // Need to use the temporary.
217 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
218 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
219 __ movd(temp1, input.AsRegisterPairLow<Register>());
220 __ movd(temp2, input.AsRegisterPairHigh<Register>());
221 __ punpckldq(temp1, temp2);
222 __ movsd(output.AsFpuRegister<XmmRegister>(), temp1);
223 } else {
224 __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());
225 }
226}
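// Note: on 32-bit x86 a Java long/double lives in a register pair (low/high), so the
// 64-bit paths above go through an XMM temporary: movd transfers each 32-bit half,
// punpckldq interleaves them into the low 64 bits, and psrlq/movd split them back out.
// Roughly, the is64bit path moves the raw bits (illustrative C):
//   uint64_t bits = (static_cast<uint64_t>(hi) << 32) | lo;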
227
228void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
 229  CreateFPToIntLocations(arena_, invoke, /* is64bit */ true);
 230}
 231void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
 232  CreateIntToFPLocations(arena_, invoke, /* is64bit */ true);
 233}
 234
 235void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
 236  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
 237}
 238void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
 239  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
 240}
 241
 242void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
 243  CreateFPToIntLocations(arena_, invoke, /* is64bit */ false);
 244}
 245void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
 246  CreateIntToFPLocations(arena_, invoke, /* is64bit */ false);
 247}
 248
 249void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
 250  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
 251}
 252void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
 253  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
 254}
255
256static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
257 LocationSummary* locations = new (arena) LocationSummary(invoke,
258 LocationSummary::kNoCall,
259 kIntrinsified);
260 locations->SetInAt(0, Location::RequiresRegister());
261 locations->SetOut(Location::SameAsFirstInput());
262}
263
264static void CreateLongToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
265 LocationSummary* locations = new (arena) LocationSummary(invoke,
266 LocationSummary::kNoCall,
267 kIntrinsified);
268 locations->SetInAt(0, Location::RequiresRegister());
269 locations->SetOut(Location::RequiresRegister());
270}
271
272static void CreateLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
273 LocationSummary* locations = new (arena) LocationSummary(invoke,
274 LocationSummary::kNoCall,
275 kIntrinsified);
276 locations->SetInAt(0, Location::RequiresRegister());
277 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
278}
279
280static void GenReverseBytes(LocationSummary* locations,
281 Primitive::Type size,
282 X86Assembler* assembler) {
283 Register out = locations->Out().AsRegister<Register>();
284
285 switch (size) {
286 case Primitive::kPrimShort:
287 // TODO: Can be done with an xchg of 8b registers. This is straight from Quick.
288 __ bswapl(out);
289 __ sarl(out, Immediate(16));
290 break;
291 case Primitive::kPrimInt:
292 __ bswapl(out);
293 break;
294 default:
295 LOG(FATAL) << "Unexpected size for reverse-bytes: " << size;
296 UNREACHABLE();
297 }
298}
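// Illustrative example for the kPrimShort path (the value is held sign-extended in a
// 32-bit register): for 0x00001234, bswapl gives 0x34120000 and the arithmetic shift
// right by 16 yields 0x00003412, i.e. the byte-swapped short, sign-extended.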
299
300void IntrinsicLocationsBuilderX86::VisitIntegerReverseBytes(HInvoke* invoke) {
301 CreateIntToIntLocations(arena_, invoke);
302}
303
304void IntrinsicCodeGeneratorX86::VisitIntegerReverseBytes(HInvoke* invoke) {
305 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
306}
307
 308void IntrinsicLocationsBuilderX86::VisitLongReverseBytes(HInvoke* invoke) {
309 CreateLongToLongLocations(arena_, invoke);
310}
311
312void IntrinsicCodeGeneratorX86::VisitLongReverseBytes(HInvoke* invoke) {
313 LocationSummary* locations = invoke->GetLocations();
314 Location input = locations->InAt(0);
315 Register input_lo = input.AsRegisterPairLow<Register>();
316 Register input_hi = input.AsRegisterPairHigh<Register>();
317 Location output = locations->Out();
318 Register output_lo = output.AsRegisterPairLow<Register>();
319 Register output_hi = output.AsRegisterPairHigh<Register>();
320
321 X86Assembler* assembler = GetAssembler();
322 // Assign the inputs to the outputs, mixing low/high.
323 __ movl(output_lo, input_hi);
324 __ movl(output_hi, input_lo);
325 __ bswapl(output_lo);
326 __ bswapl(output_hi);
327}
328
 329void IntrinsicLocationsBuilderX86::VisitShortReverseBytes(HInvoke* invoke) {
330 CreateIntToIntLocations(arena_, invoke);
331}
332
333void IntrinsicCodeGeneratorX86::VisitShortReverseBytes(HInvoke* invoke) {
334 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
335}
336
337
338// TODO: Consider Quick's way of doing Double abs through integer operations, as the immediate we
339// need is 64b.
340
341static void CreateFloatToFloat(ArenaAllocator* arena, HInvoke* invoke) {
342 // TODO: Enable memory operations when the assembler supports them.
343 LocationSummary* locations = new (arena) LocationSummary(invoke,
344 LocationSummary::kNoCall,
345 kIntrinsified);
346 locations->SetInAt(0, Location::RequiresFpuRegister());
 347  locations->SetOut(Location::SameAsFirstInput());
 348  HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
 349  DCHECK(static_or_direct != nullptr);
 350  if (static_or_direct->HasSpecialInput() &&
 351      invoke->InputAt(static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
 352    // We need addressability for the constant area.
 353    locations->SetInAt(1, Location::RequiresRegister());
 354    // We need a temporary to hold the constant.
 355    locations->AddTemp(Location::RequiresFpuRegister());
 356  }
 357}
 358
 359static void MathAbsFP(LocationSummary* locations,
360 bool is64bit,
361 X86Assembler* assembler,
362 CodeGeneratorX86* codegen) {
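  // Both paths below compute abs() by clearing the IEEE-754 sign bit: ANDing with
  // 0x7FFFFFFFFFFFFFFF (double) or 0x7FFFFFFF (float) leaves the magnitude untouched,
  // which also gives the expected result for -0.0, infinities, and NaNs.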
 363  Location output = locations->Out();
 364
 365  DCHECK(output.IsFpuRegister());
 366  if (locations->GetInputCount() == 2 && locations->InAt(1).IsValid()) {
 367    DCHECK(locations->InAt(1).IsRegister());
368 // We also have a constant area pointer.
369 Register constant_area = locations->InAt(1).AsRegister<Register>();
370 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
371 if (is64bit) {
372 __ movsd(temp, codegen->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF), constant_area));
373 __ andpd(output.AsFpuRegister<XmmRegister>(), temp);
374 } else {
375 __ movss(temp, codegen->LiteralInt32Address(INT32_C(0x7FFFFFFF), constant_area));
376 __ andps(output.AsFpuRegister<XmmRegister>(), temp);
377 }
378 } else {
 379    // Create the right constant on an aligned stack.
380 if (is64bit) {
381 __ subl(ESP, Immediate(8));
382 __ pushl(Immediate(0x7FFFFFFF));
383 __ pushl(Immediate(0xFFFFFFFF));
384 __ andpd(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
385 } else {
386 __ subl(ESP, Immediate(12));
387 __ pushl(Immediate(0x7FFFFFFF));
388 __ andps(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
389 }
390 __ addl(ESP, Immediate(16));
 391  }
392}
393
394void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) {
395 CreateFloatToFloat(arena_, invoke);
396}
397
398void IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) {
 399  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler(), codegen_);
 400}
401
402void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) {
403 CreateFloatToFloat(arena_, invoke);
404}
405
406void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) {
 407  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler(), codegen_);
 408}
409
410static void CreateAbsIntLocation(ArenaAllocator* arena, HInvoke* invoke) {
411 LocationSummary* locations = new (arena) LocationSummary(invoke,
412 LocationSummary::kNoCall,
413 kIntrinsified);
414 locations->SetInAt(0, Location::RegisterLocation(EAX));
415 locations->SetOut(Location::SameAsFirstInput());
416 locations->AddTemp(Location::RegisterLocation(EDX));
417}
418
419static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) {
420 Location output = locations->Out();
421 Register out = output.AsRegister<Register>();
422 DCHECK_EQ(out, EAX);
423 Register temp = locations->GetTemp(0).AsRegister<Register>();
424 DCHECK_EQ(temp, EDX);
425
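  // The three instructions below are the classic branchless abs idiom; roughly, in C:
  //   int32_t sign = x >> 31;      // cdq: EDX <- sign-extension of EAX
  //   x = (x ^ sign) - sign;       // xorl + subl
  // (Like Math.abs, this leaves Integer.MIN_VALUE unchanged.)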
426 // Sign extend EAX into EDX.
427 __ cdq();
428
429 // XOR EAX with sign.
430 __ xorl(EAX, EDX);
431
432 // Subtract out sign to correct.
433 __ subl(EAX, EDX);
434
435 // The result is in EAX.
436}
437
438static void CreateAbsLongLocation(ArenaAllocator* arena, HInvoke* invoke) {
439 LocationSummary* locations = new (arena) LocationSummary(invoke,
440 LocationSummary::kNoCall,
441 kIntrinsified);
442 locations->SetInAt(0, Location::RequiresRegister());
443 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
444 locations->AddTemp(Location::RequiresRegister());
445}
446
447static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) {
448 Location input = locations->InAt(0);
449 Register input_lo = input.AsRegisterPairLow<Register>();
450 Register input_hi = input.AsRegisterPairHigh<Register>();
451 Location output = locations->Out();
452 Register output_lo = output.AsRegisterPairLow<Register>();
453 Register output_hi = output.AsRegisterPairHigh<Register>();
454 Register temp = locations->GetTemp(0).AsRegister<Register>();
455
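  // Same branchless idiom as the 32-bit abs above, applied to a register pair: the
  // sign (input_hi >> 31) is broadcast to both output words, XORed in, and then
  // subtracted with borrow propagation (subl/sbbl).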
456 // Compute the sign into the temporary.
457 __ movl(temp, input_hi);
458 __ sarl(temp, Immediate(31));
459
460 // Store the sign into the output.
461 __ movl(output_lo, temp);
462 __ movl(output_hi, temp);
463
464 // XOR the input to the output.
465 __ xorl(output_lo, input_lo);
466 __ xorl(output_hi, input_hi);
467
468 // Subtract the sign.
469 __ subl(output_lo, temp);
470 __ sbbl(output_hi, temp);
471}
472
473void IntrinsicLocationsBuilderX86::VisitMathAbsInt(HInvoke* invoke) {
474 CreateAbsIntLocation(arena_, invoke);
475}
476
477void IntrinsicCodeGeneratorX86::VisitMathAbsInt(HInvoke* invoke) {
478 GenAbsInteger(invoke->GetLocations(), GetAssembler());
479}
480
481void IntrinsicLocationsBuilderX86::VisitMathAbsLong(HInvoke* invoke) {
482 CreateAbsLongLocation(arena_, invoke);
483}
484
485void IntrinsicCodeGeneratorX86::VisitMathAbsLong(HInvoke* invoke) {
486 GenAbsLong(invoke->GetLocations(), GetAssembler());
487}
488
 489static void GenMinMaxFP(LocationSummary* locations,
490 bool is_min,
491 bool is_double,
492 X86Assembler* assembler,
493 CodeGeneratorX86* codegen) {
 494  Location op1_loc = locations->InAt(0);
495 Location op2_loc = locations->InAt(1);
496 Location out_loc = locations->Out();
497 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
498
499 // Shortcut for same input locations.
500 if (op1_loc.Equals(op2_loc)) {
501 DCHECK(out_loc.Equals(op1_loc));
502 return;
503 }
504
505 // (out := op1)
506 // out <=? op2
507 // if Nan jmp Nan_label
508 // if out is min jmp done
509 // if op2 is min jmp op2_label
510 // handle -0/+0
511 // jmp done
512 // Nan_label:
513 // out := NaN
514 // op2_label:
515 // out := op2
516 // done:
517 //
518 // This removes one jmp, but needs to copy one input (op1) to out.
519 //
520 // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?
521
522 XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();
523
 524  NearLabel nan, done, op2_label;
 525  if (is_double) {
526 __ ucomisd(out, op2);
527 } else {
528 __ ucomiss(out, op2);
529 }
530
531 __ j(Condition::kParityEven, &nan);
532
533 __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
534 __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);
535
536 // Handle 0.0/-0.0.
537 if (is_min) {
538 if (is_double) {
539 __ orpd(out, op2);
540 } else {
541 __ orps(out, op2);
542 }
543 } else {
544 if (is_double) {
545 __ andpd(out, op2);
546 } else {
547 __ andps(out, op2);
548 }
549 }
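  // Rationale for the bitwise ops above (the standard 0.0/-0.0 trick): +0.0 and -0.0
  // compare equal, so when both conditional jumps fall through the operands can only
  // be zeros of either sign. ORing the bit patterns yields -0.0 whenever one operand
  // is -0.0 (the correct min), while ANDing yields +0.0 unless both are -0.0 (the
  // correct max).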
550 __ jmp(&done);
551
552 // NaN handling.
553 __ Bind(&nan);
 554  // Do we have a constant area pointer?
 555  if (locations->GetInputCount() == 3 && locations->InAt(2).IsValid()) {
 556    DCHECK(locations->InAt(2).IsRegister());
557 Register constant_area = locations->InAt(2).AsRegister<Register>();
558 if (is_double) {
559 __ movsd(out, codegen->LiteralInt64Address(kDoubleNaN, constant_area));
560 } else {
561 __ movss(out, codegen->LiteralInt32Address(kFloatNaN, constant_area));
562 }
 563  } else {
 564    if (is_double) {
565 __ pushl(Immediate(kDoubleNaNHigh));
566 __ pushl(Immediate(kDoubleNaNLow));
567 __ movsd(out, Address(ESP, 0));
568 __ addl(ESP, Immediate(8));
569 } else {
570 __ pushl(Immediate(kFloatNaN));
571 __ movss(out, Address(ESP, 0));
572 __ addl(ESP, Immediate(4));
573 }
 574  }
575 __ jmp(&done);
576
577 // out := op2;
578 __ Bind(&op2_label);
579 if (is_double) {
580 __ movsd(out, op2);
581 } else {
582 __ movss(out, op2);
583 }
584
585 // Done.
586 __ Bind(&done);
587}
588
589static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
590 LocationSummary* locations = new (arena) LocationSummary(invoke,
591 LocationSummary::kNoCall,
592 kIntrinsified);
593 locations->SetInAt(0, Location::RequiresFpuRegister());
594 locations->SetInAt(1, Location::RequiresFpuRegister());
595 // The following is sub-optimal, but all we can do for now. It would be fine to also accept
596 // the second input to be the output (we can simply swap inputs).
597 locations->SetOut(Location::SameAsFirstInput());
 598  HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
 599  DCHECK(static_or_direct != nullptr);
 600  if (static_or_direct->HasSpecialInput() &&
 601      invoke->InputAt(static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
 602    locations->SetInAt(2, Location::RequiresRegister());
 603  }
 604}
605
606void IntrinsicLocationsBuilderX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
607 CreateFPFPToFPLocations(arena_, invoke);
608}
609
610void IntrinsicCodeGeneratorX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
 611  GenMinMaxFP(invoke->GetLocations(),
 612              /* is_min */ true,
 613              /* is_double */ true,
 614              GetAssembler(),
 615              codegen_);
 616}
617
618void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) {
619 CreateFPFPToFPLocations(arena_, invoke);
620}
621
622void IntrinsicCodeGeneratorX86::VisitMathMinFloatFloat(HInvoke* invoke) {
 623  GenMinMaxFP(invoke->GetLocations(),
 624              /* is_min */ true,
 625              /* is_double */ false,
 626              GetAssembler(),
 627              codegen_);
 628}
629
630void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
631 CreateFPFPToFPLocations(arena_, invoke);
632}
633
634void IntrinsicCodeGeneratorX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
 635  GenMinMaxFP(invoke->GetLocations(),
 636              /* is_min */ false,
 637              /* is_double */ true,
 638              GetAssembler(),
 639              codegen_);
 640}
641
642void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
643 CreateFPFPToFPLocations(arena_, invoke);
644}
645
646void IntrinsicCodeGeneratorX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
 647  GenMinMaxFP(invoke->GetLocations(),
 648              /* is_min */ false,
 649              /* is_double */ false,
 650              GetAssembler(),
 651              codegen_);
 652}
653
654static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long,
655 X86Assembler* assembler) {
656 Location op1_loc = locations->InAt(0);
657 Location op2_loc = locations->InAt(1);
658
659 // Shortcut for same input locations.
660 if (op1_loc.Equals(op2_loc)) {
661 // Can return immediately, as op1_loc == out_loc.
662 // Note: if we ever support separate registers, e.g., output into memory, we need to check for
663 // a copy here.
664 DCHECK(locations->Out().Equals(op1_loc));
665 return;
666 }
667
668 if (is_long) {
669 // Need to perform a subtract to get the sign right.
670 // op1 is already in the same location as the output.
671 Location output = locations->Out();
672 Register output_lo = output.AsRegisterPairLow<Register>();
673 Register output_hi = output.AsRegisterPairHigh<Register>();
674
675 Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
676 Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();
677
678 // Spare register to compute the subtraction to set condition code.
679 Register temp = locations->GetTemp(0).AsRegister<Register>();
680
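    // The subtraction below is performed only for its flags: temp is a scratch copy,
    // so the 64-bit comparison op1 - op2 is evaluated via subl/sbbl without
    // clobbering either operand, and the resulting signed condition then drives the
    // two cmovl instructions that select the low and high words.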
681 // Subtract off op2_low.
682 __ movl(temp, output_lo);
683 __ subl(temp, op2_lo);
684
 685    // Now use the same temp and the borrow to finish the subtraction of op2_hi.
686 __ movl(temp, output_hi);
687 __ sbbl(temp, op2_hi);
688
689 // Now the condition code is correct.
690 Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
691 __ cmovl(cond, output_lo, op2_lo);
692 __ cmovl(cond, output_hi, op2_hi);
693 } else {
694 Register out = locations->Out().AsRegister<Register>();
695 Register op2 = op2_loc.AsRegister<Register>();
696
697 // (out := op1)
698 // out <=? op2
699 // if out is min jmp done
700 // out := op2
701 // done:
702
703 __ cmpl(out, op2);
704 Condition cond = is_min ? Condition::kGreater : Condition::kLess;
705 __ cmovl(cond, out, op2);
706 }
707}
708
709static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
710 LocationSummary* locations = new (arena) LocationSummary(invoke,
711 LocationSummary::kNoCall,
712 kIntrinsified);
713 locations->SetInAt(0, Location::RequiresRegister());
714 locations->SetInAt(1, Location::RequiresRegister());
715 locations->SetOut(Location::SameAsFirstInput());
716}
717
718static void CreateLongLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
719 LocationSummary* locations = new (arena) LocationSummary(invoke,
720 LocationSummary::kNoCall,
721 kIntrinsified);
722 locations->SetInAt(0, Location::RequiresRegister());
723 locations->SetInAt(1, Location::RequiresRegister());
724 locations->SetOut(Location::SameAsFirstInput());
725 // Register to use to perform a long subtract to set cc.
726 locations->AddTemp(Location::RequiresRegister());
727}
728
729void IntrinsicLocationsBuilderX86::VisitMathMinIntInt(HInvoke* invoke) {
730 CreateIntIntToIntLocations(arena_, invoke);
731}
732
733void IntrinsicCodeGeneratorX86::VisitMathMinIntInt(HInvoke* invoke) {
 734  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetAssembler());
 735}
736
737void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) {
738 CreateLongLongToLongLocations(arena_, invoke);
739}
740
741void IntrinsicCodeGeneratorX86::VisitMathMinLongLong(HInvoke* invoke) {
 742  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetAssembler());
 743}
744
745void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) {
746 CreateIntIntToIntLocations(arena_, invoke);
747}
748
749void IntrinsicCodeGeneratorX86::VisitMathMaxIntInt(HInvoke* invoke) {
 750  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetAssembler());
 751}
752
753void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) {
754 CreateLongLongToLongLocations(arena_, invoke);
755}
756
757void IntrinsicCodeGeneratorX86::VisitMathMaxLongLong(HInvoke* invoke) {
 758  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetAssembler());
 759}
760
761static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
762 LocationSummary* locations = new (arena) LocationSummary(invoke,
763 LocationSummary::kNoCall,
764 kIntrinsified);
765 locations->SetInAt(0, Location::RequiresFpuRegister());
766 locations->SetOut(Location::RequiresFpuRegister());
767}
768
769void IntrinsicLocationsBuilderX86::VisitMathSqrt(HInvoke* invoke) {
770 CreateFPToFPLocations(arena_, invoke);
771}
772
773void IntrinsicCodeGeneratorX86::VisitMathSqrt(HInvoke* invoke) {
774 LocationSummary* locations = invoke->GetLocations();
775 XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
776 XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
777
778 GetAssembler()->sqrtsd(out, in);
779}
780
 781static void InvokeOutOfLineIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke) {
 782  MoveArguments(invoke, codegen);
 783
 784  DCHECK(invoke->IsInvokeStaticOrDirect());
 785  codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(),
 786                                      Location::RegisterLocation(EAX));
 787  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
 788
789 // Copy the result back to the expected output.
790 Location out = invoke->GetLocations()->Out();
791 if (out.IsValid()) {
792 DCHECK(out.IsRegister());
 793    codegen->MoveFromReturnRegister(out, invoke->GetType());
 794  }
795}
796
797static void CreateSSE41FPToFPLocations(ArenaAllocator* arena,
798 HInvoke* invoke,
799 CodeGeneratorX86* codegen) {
800 // Do we have instruction support?
801 if (codegen->GetInstructionSetFeatures().HasSSE4_1()) {
802 CreateFPToFPLocations(arena, invoke);
803 return;
804 }
805
806 // We have to fall back to a call to the intrinsic.
807 LocationSummary* locations = new (arena) LocationSummary(invoke,
 808                                                           LocationSummary::kCallOnMainOnly);
 809  InvokeRuntimeCallingConvention calling_convention;
810 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
811 locations->SetOut(Location::FpuRegisterLocation(XMM0));
812 // Needs to be EAX for the invoke.
813 locations->AddTemp(Location::RegisterLocation(EAX));
814}
815
816static void GenSSE41FPToFPIntrinsic(CodeGeneratorX86* codegen,
817 HInvoke* invoke,
818 X86Assembler* assembler,
819 int round_mode) {
820 LocationSummary* locations = invoke->GetLocations();
821 if (locations->WillCall()) {
822 InvokeOutOfLineIntrinsic(codegen, invoke);
823 } else {
824 XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
825 XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
826 __ roundsd(out, in, Immediate(round_mode));
827 }
828}
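// Rounding-control immediates used by the callers below (SSE4.1 roundsd imm8 with
// bit 2 clear, so MXCSR is not consulted): 0 = round to nearest even (rint),
// 1 = round toward negative infinity (floor), 2 = round toward positive infinity (ceil).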
829
830void IntrinsicLocationsBuilderX86::VisitMathCeil(HInvoke* invoke) {
831 CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
832}
833
834void IntrinsicCodeGeneratorX86::VisitMathCeil(HInvoke* invoke) {
835 GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 2);
836}
837
838void IntrinsicLocationsBuilderX86::VisitMathFloor(HInvoke* invoke) {
839 CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
840}
841
842void IntrinsicCodeGeneratorX86::VisitMathFloor(HInvoke* invoke) {
843 GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 1);
844}
845
846void IntrinsicLocationsBuilderX86::VisitMathRint(HInvoke* invoke) {
847 CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
848}
849
850void IntrinsicCodeGeneratorX86::VisitMathRint(HInvoke* invoke) {
851 GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 0);
852}
853
 854void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke) {
855 // Do we have instruction support?
856 if (codegen_->GetInstructionSetFeatures().HasSSE4_1()) {
 857    HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
 858    DCHECK(static_or_direct != nullptr);
 859    LocationSummary* locations = new (arena_) LocationSummary(invoke,
860 LocationSummary::kNoCall,
861 kIntrinsified);
862 locations->SetInAt(0, Location::RequiresFpuRegister());
 863    if (static_or_direct->HasSpecialInput() &&
864 invoke->InputAt(
865 static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
866 locations->SetInAt(1, Location::RequiresRegister());
867 }
 868    locations->SetOut(Location::RequiresRegister());
 869    locations->AddTemp(Location::RequiresFpuRegister());
870 locations->AddTemp(Location::RequiresFpuRegister());
871 return;
872 }
873
874 // We have to fall back to a call to the intrinsic.
875 LocationSummary* locations = new (arena_) LocationSummary(invoke,
 876                                                            LocationSummary::kCallOnMainOnly);
 877  InvokeRuntimeCallingConvention calling_convention;
878 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
879 locations->SetOut(Location::RegisterLocation(EAX));
880 // Needs to be EAX for the invoke.
881 locations->AddTemp(Location::RegisterLocation(EAX));
882}
883
884void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke) {
885 LocationSummary* locations = invoke->GetLocations();
Aart Bik2c9f4952016-08-01 16:52:27 -0700886 if (locations->WillCall()) { // TODO: can we reach this?
Mark Mendellfb8d2792015-03-31 22:16:59 -0400887 InvokeOutOfLineIntrinsic(codegen_, invoke);
888 return;
889 }
890
Mark Mendellfb8d2792015-03-31 22:16:59 -0400891 XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
Aart Bik2c9f4952016-08-01 16:52:27 -0700892 XmmRegister t1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
893 XmmRegister t2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Mark Mendellfb8d2792015-03-31 22:16:59 -0400894 Register out = locations->Out().AsRegister<Register>();
Aart Bik2c9f4952016-08-01 16:52:27 -0700895 NearLabel skip_incr, done;
Mark Mendellfb8d2792015-03-31 22:16:59 -0400896 X86Assembler* assembler = GetAssembler();
897
 898  // Since no direct x86 rounding instruction matches the required semantics,
899 // this intrinsic is implemented as follows:
900 // result = floor(in);
901 // if (in - result >= 0.5f)
902 // result = result + 1.0f;
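  // Illustrative values (Java Math.round semantics, i.e. round half up):
  //   in = 5.7f : floor = 5.0f, 5.7f - 5.0f = 0.7f >= 0.5f -> result = 6.0f -> 6
  //   in = -0.2f: floor = -1.0f, -0.2f - (-1.0f) = 0.8f >= 0.5f -> result = 0.0f -> 0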
903 __ movss(t2, in);
904 __ roundss(t1, in, Immediate(1));
905 __ subss(t2, t1);
 906  if (locations->GetInputCount() == 2 && locations->InAt(1).IsValid()) {
907 // Direct constant area available.
908 Register constant_area = locations->InAt(1).AsRegister<Register>();
909 __ comiss(t2, codegen_->LiteralInt32Address(bit_cast<int32_t, float>(0.5f), constant_area));
910 __ j(kBelow, &skip_incr);
911 __ addss(t1, codegen_->LiteralInt32Address(bit_cast<int32_t, float>(1.0f), constant_area));
912 __ Bind(&skip_incr);
913 } else {
914 // No constant area: go through stack.
915 __ pushl(Immediate(bit_cast<int32_t, float>(0.5f)));
916 __ pushl(Immediate(bit_cast<int32_t, float>(1.0f)));
917 __ comiss(t2, Address(ESP, 4));
918 __ j(kBelow, &skip_incr);
919 __ addss(t1, Address(ESP, 0));
920 __ Bind(&skip_incr);
921 __ addl(ESP, Immediate(8));
922 }
 923
 924  // Final conversion to an integer. Unfortunately this also does not have a
925 // direct x86 instruction, since NaN should map to 0 and large positive
926 // values need to be clipped to the extreme value.
 927  __ movl(out, Immediate(kPrimIntMax));
 928  __ cvtsi2ss(t2, out);
929 __ comiss(t1, t2);
930 __ j(kAboveEqual, &done); // clipped to max (already in out), does not jump on unordered
931 __ movl(out, Immediate(0)); // does not change flags
932 __ j(kUnordered, &done); // NaN mapped to 0 (just moved in out)
933 __ cvttss2si(out, t1);
 934  __ Bind(&done);
935}
936
 937static void CreateFPToFPCallLocations(ArenaAllocator* arena,
938 HInvoke* invoke) {
939 LocationSummary* locations = new (arena) LocationSummary(invoke,
 940                                                           LocationSummary::kCallOnMainOnly,
 941                                                           kIntrinsified);
942 InvokeRuntimeCallingConvention calling_convention;
943 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
944 locations->SetOut(Location::FpuRegisterLocation(XMM0));
945}
946
947static void GenFPToFPCall(HInvoke* invoke, CodeGeneratorX86* codegen, QuickEntrypointEnum entry) {
948 LocationSummary* locations = invoke->GetLocations();
949 DCHECK(locations->WillCall());
950 DCHECK(invoke->IsInvokeStaticOrDirect());
951 X86Assembler* assembler = codegen->GetAssembler();
952
953 // We need some place to pass the parameters.
954 __ subl(ESP, Immediate(16));
955 __ cfi().AdjustCFAOffset(16);
956
957 // Pass the parameters at the bottom of the stack.
958 __ movsd(Address(ESP, 0), XMM0);
959
960 // If we have a second parameter, pass it next.
961 if (invoke->GetNumberOfArguments() == 2) {
962 __ movsd(Address(ESP, 8), XMM1);
963 }
964
965 // Now do the actual call.
 966  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
 967
968 // Extract the return value from the FP stack.
969 __ fstpl(Address(ESP, 0));
970 __ movsd(XMM0, Address(ESP, 0));
971
972 // And clean up the stack.
973 __ addl(ESP, Immediate(16));
974 __ cfi().AdjustCFAOffset(-16);
 975}
976
977void IntrinsicLocationsBuilderX86::VisitMathCos(HInvoke* invoke) {
978 CreateFPToFPCallLocations(arena_, invoke);
979}
980
981void IntrinsicCodeGeneratorX86::VisitMathCos(HInvoke* invoke) {
982 GenFPToFPCall(invoke, codegen_, kQuickCos);
983}
984
985void IntrinsicLocationsBuilderX86::VisitMathSin(HInvoke* invoke) {
986 CreateFPToFPCallLocations(arena_, invoke);
987}
988
989void IntrinsicCodeGeneratorX86::VisitMathSin(HInvoke* invoke) {
990 GenFPToFPCall(invoke, codegen_, kQuickSin);
991}
992
993void IntrinsicLocationsBuilderX86::VisitMathAcos(HInvoke* invoke) {
994 CreateFPToFPCallLocations(arena_, invoke);
995}
996
997void IntrinsicCodeGeneratorX86::VisitMathAcos(HInvoke* invoke) {
998 GenFPToFPCall(invoke, codegen_, kQuickAcos);
999}
1000
1001void IntrinsicLocationsBuilderX86::VisitMathAsin(HInvoke* invoke) {
1002 CreateFPToFPCallLocations(arena_, invoke);
1003}
1004
1005void IntrinsicCodeGeneratorX86::VisitMathAsin(HInvoke* invoke) {
1006 GenFPToFPCall(invoke, codegen_, kQuickAsin);
1007}
1008
1009void IntrinsicLocationsBuilderX86::VisitMathAtan(HInvoke* invoke) {
1010 CreateFPToFPCallLocations(arena_, invoke);
1011}
1012
1013void IntrinsicCodeGeneratorX86::VisitMathAtan(HInvoke* invoke) {
1014 GenFPToFPCall(invoke, codegen_, kQuickAtan);
1015}
1016
1017void IntrinsicLocationsBuilderX86::VisitMathCbrt(HInvoke* invoke) {
1018 CreateFPToFPCallLocations(arena_, invoke);
1019}
1020
1021void IntrinsicCodeGeneratorX86::VisitMathCbrt(HInvoke* invoke) {
1022 GenFPToFPCall(invoke, codegen_, kQuickCbrt);
1023}
1024
1025void IntrinsicLocationsBuilderX86::VisitMathCosh(HInvoke* invoke) {
1026 CreateFPToFPCallLocations(arena_, invoke);
1027}
1028
1029void IntrinsicCodeGeneratorX86::VisitMathCosh(HInvoke* invoke) {
1030 GenFPToFPCall(invoke, codegen_, kQuickCosh);
1031}
1032
1033void IntrinsicLocationsBuilderX86::VisitMathExp(HInvoke* invoke) {
1034 CreateFPToFPCallLocations(arena_, invoke);
1035}
1036
1037void IntrinsicCodeGeneratorX86::VisitMathExp(HInvoke* invoke) {
1038 GenFPToFPCall(invoke, codegen_, kQuickExp);
1039}
1040
1041void IntrinsicLocationsBuilderX86::VisitMathExpm1(HInvoke* invoke) {
1042 CreateFPToFPCallLocations(arena_, invoke);
1043}
1044
1045void IntrinsicCodeGeneratorX86::VisitMathExpm1(HInvoke* invoke) {
1046 GenFPToFPCall(invoke, codegen_, kQuickExpm1);
1047}
1048
1049void IntrinsicLocationsBuilderX86::VisitMathLog(HInvoke* invoke) {
1050 CreateFPToFPCallLocations(arena_, invoke);
1051}
1052
1053void IntrinsicCodeGeneratorX86::VisitMathLog(HInvoke* invoke) {
1054 GenFPToFPCall(invoke, codegen_, kQuickLog);
1055}
1056
1057void IntrinsicLocationsBuilderX86::VisitMathLog10(HInvoke* invoke) {
1058 CreateFPToFPCallLocations(arena_, invoke);
1059}
1060
1061void IntrinsicCodeGeneratorX86::VisitMathLog10(HInvoke* invoke) {
1062 GenFPToFPCall(invoke, codegen_, kQuickLog10);
1063}
1064
1065void IntrinsicLocationsBuilderX86::VisitMathSinh(HInvoke* invoke) {
1066 CreateFPToFPCallLocations(arena_, invoke);
1067}
1068
1069void IntrinsicCodeGeneratorX86::VisitMathSinh(HInvoke* invoke) {
1070 GenFPToFPCall(invoke, codegen_, kQuickSinh);
1071}
1072
1073void IntrinsicLocationsBuilderX86::VisitMathTan(HInvoke* invoke) {
1074 CreateFPToFPCallLocations(arena_, invoke);
1075}
1076
1077void IntrinsicCodeGeneratorX86::VisitMathTan(HInvoke* invoke) {
1078 GenFPToFPCall(invoke, codegen_, kQuickTan);
1079}
1080
1081void IntrinsicLocationsBuilderX86::VisitMathTanh(HInvoke* invoke) {
1082 CreateFPToFPCallLocations(arena_, invoke);
1083}
1084
1085void IntrinsicCodeGeneratorX86::VisitMathTanh(HInvoke* invoke) {
1086 GenFPToFPCall(invoke, codegen_, kQuickTanh);
1087}
1088
1089static void CreateFPFPToFPCallLocations(ArenaAllocator* arena,
1090 HInvoke* invoke) {
1091 LocationSummary* locations = new (arena) LocationSummary(invoke,
1092                                                           LocationSummary::kCallOnMainOnly,
1093                                                           kIntrinsified);
1094 InvokeRuntimeCallingConvention calling_convention;
1095 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
1096 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
1097 locations->SetOut(Location::FpuRegisterLocation(XMM0));
1098}
1099
1100void IntrinsicLocationsBuilderX86::VisitMathAtan2(HInvoke* invoke) {
1101 CreateFPFPToFPCallLocations(arena_, invoke);
1102}
1103
1104void IntrinsicCodeGeneratorX86::VisitMathAtan2(HInvoke* invoke) {
1105 GenFPToFPCall(invoke, codegen_, kQuickAtan2);
1106}
1107
1108void IntrinsicLocationsBuilderX86::VisitMathHypot(HInvoke* invoke) {
1109 CreateFPFPToFPCallLocations(arena_, invoke);
1110}
1111
1112void IntrinsicCodeGeneratorX86::VisitMathHypot(HInvoke* invoke) {
1113 GenFPToFPCall(invoke, codegen_, kQuickHypot);
1114}
1115
1116void IntrinsicLocationsBuilderX86::VisitMathNextAfter(HInvoke* invoke) {
1117 CreateFPFPToFPCallLocations(arena_, invoke);
1118}
1119
1120void IntrinsicCodeGeneratorX86::VisitMathNextAfter(HInvoke* invoke) {
1121 GenFPToFPCall(invoke, codegen_, kQuickNextAfter);
1122}
1123
1124void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
1125 // We need at least two of the positions or length to be an integer constant,
1126 // or else we won't have enough free registers.
1127 HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
1128 HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
1129 HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
1130
1131 int num_constants =
1132 ((src_pos != nullptr) ? 1 : 0)
1133 + ((dest_pos != nullptr) ? 1 : 0)
1134 + ((length != nullptr) ? 1 : 0);
1135
1136 if (num_constants < 2) {
1137 // Not enough free registers.
1138 return;
1139 }
1140
1141 // As long as we are checking, we might as well check to see if the src and dest
1142 // positions are >= 0.
1143 if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
1144 (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
1145 // We will have to fail anyways.
1146 return;
1147 }
1148
1149 // And since we are already checking, check the length too.
1150 if (length != nullptr) {
1151 int32_t len = length->GetValue();
1152 if (len < 0) {
1153 // Just call as normal.
1154 return;
1155 }
1156 }
1157
1158 // Okay, it is safe to generate inline code.
1159 LocationSummary* locations =
1160 new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
1161 // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
1162 locations->SetInAt(0, Location::RequiresRegister());
1163 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
1164 locations->SetInAt(2, Location::RequiresRegister());
1165 locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
1166 locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));
1167
1168 // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
1169 locations->AddTemp(Location::RegisterLocation(ESI));
1170 locations->AddTemp(Location::RegisterLocation(EDI));
1171 locations->AddTemp(Location::RegisterLocation(ECX));
1172}
1173
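// Emits the bounds checks needed for one arraycopy operand: unless the values are
// known constants, it verifies that pos >= 0, that pos <= input->length, and that
// (input->length - pos) >= length, jumping to the slow path otherwise. When
// `length_is_input_length` is true the copy uses the whole input, so only pos == 0
// can succeed.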
1174static void CheckPosition(X86Assembler* assembler,
1175 Location pos,
1176 Register input,
1177                          Location length,
1178                          SlowPathCode* slow_path,
1179                          Register temp,
1180                          bool length_is_input_length = false) {
1181  // Where is the length in the Array?
1182  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();
1183
1184 if (pos.IsConstant()) {
1185 int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
1186 if (pos_const == 0) {
1187      if (!length_is_input_length) {
1188 // Check that length(input) >= length.
1189 if (length.IsConstant()) {
1190 __ cmpl(Address(input, length_offset),
1191 Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
1192 } else {
1193 __ cmpl(Address(input, length_offset), length.AsRegister<Register>());
1194 }
1195 __ j(kLess, slow_path->GetEntryLabel());
1196 }
1197    } else {
1198      // Check that length(input) >= pos.
1199      __ movl(temp, Address(input, length_offset));
1200      __ subl(temp, Immediate(pos_const));
1201      __ j(kLess, slow_path->GetEntryLabel());
1202
1203 // Check that (length(input) - pos) >= length.
1204      if (length.IsConstant()) {
1205 __ cmpl(temp, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
1206 } else {
1207 __ cmpl(temp, length.AsRegister<Register>());
1208 }
1209      __ j(kLess, slow_path->GetEntryLabel());
1210    }
1211  } else if (length_is_input_length) {
1212 // The only way the copy can succeed is if pos is zero.
1213 Register pos_reg = pos.AsRegister<Register>();
1214 __ testl(pos_reg, pos_reg);
1215 __ j(kNotEqual, slow_path->GetEntryLabel());
1216  } else {
1217 // Check that pos >= 0.
1218 Register pos_reg = pos.AsRegister<Register>();
1219 __ testl(pos_reg, pos_reg);
1220 __ j(kLess, slow_path->GetEntryLabel());
1221
1222 // Check that pos <= length(input).
1223 __ cmpl(Address(input, length_offset), pos_reg);
1224 __ j(kLess, slow_path->GetEntryLabel());
1225
1226 // Check that (length(input) - pos) >= length.
1227 __ movl(temp, Address(input, length_offset));
1228 __ subl(temp, pos_reg);
1229    if (length.IsConstant()) {
1230 __ cmpl(temp, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
1231 } else {
1232 __ cmpl(temp, length.AsRegister<Register>());
1233 }
1234    __ j(kLess, slow_path->GetEntryLabel());
1235 }
1236}
1237
1238void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
1239 X86Assembler* assembler = GetAssembler();
1240 LocationSummary* locations = invoke->GetLocations();
1241
1242 Register src = locations->InAt(0).AsRegister<Register>();
1243 Location srcPos = locations->InAt(1);
1244 Register dest = locations->InAt(2).AsRegister<Register>();
1245 Location destPos = locations->InAt(3);
1246 Location length = locations->InAt(4);
1247
1248 // Temporaries that we need for MOVSW.
1249 Register src_base = locations->GetTemp(0).AsRegister<Register>();
1250 DCHECK_EQ(src_base, ESI);
1251 Register dest_base = locations->GetTemp(1).AsRegister<Register>();
1252 DCHECK_EQ(dest_base, EDI);
1253 Register count = locations->GetTemp(2).AsRegister<Register>();
1254 DCHECK_EQ(count, ECX);
1255
1256  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
1257  codegen_->AddSlowPath(slow_path);
1258
1259 // Bail out if the source and destination are the same (to handle overlap).
1260 __ cmpl(src, dest);
1261 __ j(kEqual, slow_path->GetEntryLabel());
1262
1263 // Bail out if the source is null.
1264 __ testl(src, src);
1265 __ j(kEqual, slow_path->GetEntryLabel());
1266
1267 // Bail out if the destination is null.
1268 __ testl(dest, dest);
1269 __ j(kEqual, slow_path->GetEntryLabel());
1270
1271 // If the length is negative, bail out.
1272 // We have already checked in the LocationsBuilder for the constant case.
1273 if (!length.IsConstant()) {
1274    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
1275 __ j(kLess, slow_path->GetEntryLabel());
1276 }
1277
1278 // We need the count in ECX.
1279 if (length.IsConstant()) {
1280 __ movl(count, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
1281 } else {
1282 __ movl(count, length.AsRegister<Register>());
1283 }
1284
1285  // Validity checks: source. Use src_base as a temporary register.
1286  CheckPosition(assembler, srcPos, src, Location::RegisterLocation(count), slow_path, src_base);
1287
1288  // Validity checks: dest. Use src_base as a temporary register.
1289  CheckPosition(assembler, destPos, dest, Location::RegisterLocation(count), slow_path, src_base);
1290
1291 // Okay, everything checks out. Finally time to do the copy.
1292 // Check assumption that sizeof(Char) is 2 (used in scaling below).
1293 const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
1294 DCHECK_EQ(char_size, 2u);
1295
1296 const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();
1297
1298 if (srcPos.IsConstant()) {
1299 int32_t srcPos_const = srcPos.GetConstant()->AsIntConstant()->GetValue();
1300 __ leal(src_base, Address(src, char_size * srcPos_const + data_offset));
1301 } else {
1302 __ leal(src_base, Address(src, srcPos.AsRegister<Register>(),
1303 ScaleFactor::TIMES_2, data_offset));
1304 }
1305 if (destPos.IsConstant()) {
1306 int32_t destPos_const = destPos.GetConstant()->AsIntConstant()->GetValue();
1307
1308 __ leal(dest_base, Address(dest, char_size * destPos_const + data_offset));
1309 } else {
1310 __ leal(dest_base, Address(dest, destPos.AsRegister<Register>(),
1311 ScaleFactor::TIMES_2, data_offset));
1312 }
1313
1314 // Do the move.
1315 __ rep_movsw();
1316
1317 __ Bind(slow_path->GetExitLabel());
1318}
1319
1320void IntrinsicLocationsBuilderX86::VisitStringCompareTo(HInvoke* invoke) {
1321  // The inputs plus one temp.
1322  LocationSummary* locations = new (arena_) LocationSummary(invoke,
1323                                                            LocationSummary::kCallOnMainAndSlowPath,
1324                                                            kIntrinsified);
1325 InvokeRuntimeCallingConvention calling_convention;
1326 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1327 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1328 locations->SetOut(Location::RegisterLocation(EAX));
1329}
1330
1331void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
1332 X86Assembler* assembler = GetAssembler();
1333 LocationSummary* locations = invoke->GetLocations();
1334
1335  // Note that the null check must have been done earlier.
1336  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1337
1338 Register argument = locations->InAt(1).AsRegister<Register>();
1339 __ testl(argument, argument);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001340 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001341 codegen_->AddSlowPath(slow_path);
1342 __ j(kEqual, slow_path->GetEntryLabel());
1343
Serban Constantinescuba45db02016-07-12 22:53:02 +01001344 codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001345 __ Bind(slow_path->GetExitLabel());
1346}
1347
Agi Csakid7138c82015-08-13 17:46:44 -07001348void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) {
1349 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1350 LocationSummary::kNoCall,
1351 kIntrinsified);
1352 locations->SetInAt(0, Location::RequiresRegister());
1353 locations->SetInAt(1, Location::RequiresRegister());
1354
1355  // Request temporary registers; ECX and EDI are needed for the repe_cmpsl instruction.
1356 locations->AddTemp(Location::RegisterLocation(ECX));
1357 locations->AddTemp(Location::RegisterLocation(EDI));
1358
1359  // Set output; ESI is needed for the repe_cmpsl instruction anyway.
1360 locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap);
1361}
1362
1363void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) {
1364 X86Assembler* assembler = GetAssembler();
1365 LocationSummary* locations = invoke->GetLocations();
1366
1367 Register str = locations->InAt(0).AsRegister<Register>();
1368 Register arg = locations->InAt(1).AsRegister<Register>();
1369 Register ecx = locations->GetTemp(0).AsRegister<Register>();
1370 Register edi = locations->GetTemp(1).AsRegister<Register>();
1371 Register esi = locations->Out().AsRegister<Register>();
1372
Mark Mendell0c9497d2015-08-21 09:30:05 -04001373 NearLabel end, return_true, return_false;
Agi Csakid7138c82015-08-13 17:46:44 -07001374
1375 // Get offsets of count, value, and class fields within a string object.
1376 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
1377 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1378 const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();
1379
1380 // Note that the null check must have been done earlier.
1381 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1382
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001383 StringEqualsOptimizations optimizations(invoke);
1384 if (!optimizations.GetArgumentNotNull()) {
1385 // Check if input is null, return false if it is.
1386 __ testl(arg, arg);
1387 __ j(kEqual, &return_false);
1388 }
Agi Csakid7138c82015-08-13 17:46:44 -07001389
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001390 if (!optimizations.GetArgumentIsString()) {
Vladimir Marko53b52002016-05-24 19:30:45 +01001391 // Instanceof check for the argument by comparing class fields.
1392 // All string objects must have the same type since String cannot be subclassed.
1393 // Receiver must be a string object, so its class field is equal to all strings' class fields.
1394 // If the argument is a string object, its class field must be equal to receiver's class field.
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001395 __ movl(ecx, Address(str, class_offset));
1396 __ cmpl(ecx, Address(arg, class_offset));
1397 __ j(kNotEqual, &return_false);
1398 }
Agi Csakid7138c82015-08-13 17:46:44 -07001399
1400 // Reference equality check, return true if same reference.
1401 __ cmpl(str, arg);
1402 __ j(kEqual, &return_true);
1403
1404 // Load length of receiver string.
1405 __ movl(ecx, Address(str, count_offset));
1406 // Check if lengths are equal, return false if they're not.
1407 __ cmpl(ecx, Address(arg, count_offset));
1408 __ j(kNotEqual, &return_false);
1409 // Return true if both strings are empty.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001410 __ jecxz(&return_true);
Agi Csakid7138c82015-08-13 17:46:44 -07001411
1412 // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction.
1413 __ leal(esi, Address(str, value_offset));
1414 __ leal(edi, Address(arg, value_offset));
1415
1416 // Divide string length by 2 to compare characters 2 at a time and adjust for odd lengths.
1417 __ addl(ecx, Immediate(1));
1418 __ shrl(ecx, Immediate(1));
1419
1420 // Assertions that must hold in order to compare strings 2 characters at a time.
1421 DCHECK_ALIGNED(value_offset, 4);
1422 static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");
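  // Worked example (editor note): for two 5-char strings, ECX becomes (5 + 1) / 2 = 3, so the
  // loop below compares 3 dwords, i.e. 6 chars. The 6th "char" is the padding that the
  // assertions above assume is zero in both strings, so it cannot change the result.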
1423
1424 // Loop to compare strings two characters at a time starting at the beginning of the string.
1425 __ repe_cmpsl();
1426 // If strings are not equal, zero flag will be cleared.
1427 __ j(kNotEqual, &return_false);
1428
1429 // Return true and exit the function.
1430 // If loop does not result in returning false, we return true.
1431 __ Bind(&return_true);
1432 __ movl(esi, Immediate(1));
1433 __ jmp(&end);
1434
1435 // Return false and exit the function.
1436 __ Bind(&return_false);
1437 __ xorl(esi, esi);
1438 __ Bind(&end);
1439}
1440
Andreas Gampe21030dd2015-05-07 14:46:15 -07001441static void CreateStringIndexOfLocations(HInvoke* invoke,
1442 ArenaAllocator* allocator,
1443 bool start_at_zero) {
1444 LocationSummary* locations = new (allocator) LocationSummary(invoke,
1445 LocationSummary::kCallOnSlowPath,
1446 kIntrinsified);
1447  // The data needs to be in EDI for scasw, so request that the string be there anyway.
1448 locations->SetInAt(0, Location::RegisterLocation(EDI));
1449  // If we look for a constant char, we'll still have to copy it into EAX, so just request the
1450  // allocator to do that anyway. We can still do the constant check by checking the parameter
1451  // of the instruction explicitly.
1452 // Note: This works as we don't clobber EAX anywhere.
1453 locations->SetInAt(1, Location::RegisterLocation(EAX));
1454 if (!start_at_zero) {
1455 locations->SetInAt(2, Location::RequiresRegister()); // The starting index.
1456 }
1457 // As we clobber EDI during execution anyways, also use it as the output.
1458 locations->SetOut(Location::SameAsFirstInput());
1459
1460 // repne scasw uses ECX as the counter.
1461 locations->AddTemp(Location::RegisterLocation(ECX));
1462 // Need another temporary to be able to compute the result.
1463 locations->AddTemp(Location::RequiresRegister());
1464}
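// Editor note (illustrative, not authoritative): the fixed registers requested above match the
// implicit operands of REPNE SCASW: AX holds the 16-bit value being searched for, EDI points at
// the buffer being scanned, and ECX holds the remaining element count. Roughly:
//   do { compare AX with *(uint16_t*)EDI; EDI += 2; --ECX; } while (ECX != 0 && no match);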
1465
1466static void GenerateStringIndexOf(HInvoke* invoke,
1467 X86Assembler* assembler,
1468 CodeGeneratorX86* codegen,
1469 ArenaAllocator* allocator,
1470 bool start_at_zero) {
1471 LocationSummary* locations = invoke->GetLocations();
1472
1473 // Note that the null check must have been done earlier.
1474 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1475
1476 Register string_obj = locations->InAt(0).AsRegister<Register>();
1477 Register search_value = locations->InAt(1).AsRegister<Register>();
1478 Register counter = locations->GetTemp(0).AsRegister<Register>();
1479 Register string_length = locations->GetTemp(1).AsRegister<Register>();
1480 Register out = locations->Out().AsRegister<Register>();
1481
1482 // Check our assumptions for registers.
1483 DCHECK_EQ(string_obj, EDI);
1484 DCHECK_EQ(search_value, EAX);
1485 DCHECK_EQ(counter, ECX);
1486 DCHECK_EQ(out, EDI);
1487
1488 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001489 // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
Andreas Gampe85b62f22015-09-09 13:15:38 -07001490 SlowPathCode* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001491 HInstruction* code_point = invoke->InputAt(1);
1492 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001493 if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) >
Andreas Gampe21030dd2015-05-07 14:46:15 -07001494 std::numeric_limits<uint16_t>::max()) {
1495 // Always needs the slow-path. We could directly dispatch to it, but this case should be
1496 // rare, so for simplicity just put the full slow-path down and branch unconditionally.
1497 slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
1498 codegen->AddSlowPath(slow_path);
1499 __ jmp(slow_path->GetEntryLabel());
1500 __ Bind(slow_path->GetExitLabel());
1501 return;
1502 }
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001503 } else if (code_point->GetType() != Primitive::kPrimChar) {
Andreas Gampe21030dd2015-05-07 14:46:15 -07001504 __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
1505 slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
1506 codegen->AddSlowPath(slow_path);
1507 __ j(kAbove, slow_path->GetEntryLabel());
1508 }
1509
1510 // From here down, we know that we are looking for a char that fits in 16 bits.
1511 // Location of reference to data array within the String object.
1512 int32_t value_offset = mirror::String::ValueOffset().Int32Value();
1513 // Location of count within the String object.
1514 int32_t count_offset = mirror::String::CountOffset().Int32Value();
1515
1516 // Load string length, i.e., the count field of the string.
1517 __ movl(string_length, Address(string_obj, count_offset));
1518
1519 // Do a zero-length check.
1520 // TODO: Support jecxz.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001521 NearLabel not_found_label;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001522 __ testl(string_length, string_length);
1523 __ j(kEqual, &not_found_label);
1524
1525 if (start_at_zero) {
1526 // Number of chars to scan is the same as the string length.
1527 __ movl(counter, string_length);
1528
1529 // Move to the start of the string.
1530 __ addl(string_obj, Immediate(value_offset));
1531 } else {
1532 Register start_index = locations->InAt(2).AsRegister<Register>();
1533
1534 // Do a start_index check.
1535 __ cmpl(start_index, string_length);
1536 __ j(kGreaterEqual, &not_found_label);
1537
1538    // Ensure we have a start index >= 0.
1539 __ xorl(counter, counter);
1540 __ cmpl(start_index, Immediate(0));
1541 __ cmovl(kGreater, counter, start_index);
1542
1543 // Move to the start of the string: string_obj + value_offset + 2 * start_index.
1544 __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));
1545
1546 // Now update ecx (the repne scasw work counter). We have string.length - start_index left to
1547 // compare.
1548 __ negl(counter);
1549 __ leal(counter, Address(string_length, counter, ScaleFactor::TIMES_1, 0));
1550 }
1551
1552 // Everything is set up for repne scasw:
1553 // * Comparison address in EDI.
1554 // * Counter in ECX.
1555 __ repne_scasw();
1556
1557 // Did we find a match?
1558 __ j(kNotEqual, &not_found_label);
1559
1560 // Yes, we matched. Compute the index of the result.
1561 __ subl(string_length, counter);
1562 __ leal(out, Address(string_length, -1));
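  // Worked example (editor note): searching "hello" (length 5) for 'l' from index 0 starts with
  // ECX = 5. REPNE SCASW stops after scanning indices 0..2, leaving ECX = 2, so the SUB above
  // gives 5 - 2 = 3 and this LEA produces 3 - 1 = 2, the index of the first 'l'.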
1563
Mark Mendell0c9497d2015-08-21 09:30:05 -04001564 NearLabel done;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001565 __ jmp(&done);
1566
1567 // Failed to match; return -1.
1568 __ Bind(&not_found_label);
1569 __ movl(out, Immediate(-1));
1570
1571 // And join up at the end.
1572 __ Bind(&done);
1573 if (slow_path != nullptr) {
1574 __ Bind(slow_path->GetExitLabel());
1575 }
1576}
1577
1578void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001579 CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ true);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001580}
1581
1582void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001583 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001584}
1585
1586void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001587 CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ false);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001588}
1589
1590void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001591 GenerateStringIndexOf(
1592 invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001593}
1594
Jeff Hao848f70a2014-01-15 13:49:50 -08001595void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1596 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001597 LocationSummary::kCallOnMainAndSlowPath,
Jeff Hao848f70a2014-01-15 13:49:50 -08001598 kIntrinsified);
1599 InvokeRuntimeCallingConvention calling_convention;
1600 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1601 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1602 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1603 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1604 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001605}
1606
1607void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1608 X86Assembler* assembler = GetAssembler();
1609 LocationSummary* locations = invoke->GetLocations();
1610
1611 Register byte_array = locations->InAt(0).AsRegister<Register>();
1612 __ testl(byte_array, byte_array);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001613 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001614 codegen_->AddSlowPath(slow_path);
1615 __ j(kEqual, slow_path->GetEntryLabel());
1616
Serban Constantinescuba45db02016-07-12 22:53:02 +01001617 codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001618 CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001619 __ Bind(slow_path->GetExitLabel());
1620}
1621
1622void IntrinsicLocationsBuilderX86::VisitStringNewStringFromChars(HInvoke* invoke) {
1623 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01001624 LocationSummary::kCallOnMainOnly,
Jeff Hao848f70a2014-01-15 13:49:50 -08001625 kIntrinsified);
1626 InvokeRuntimeCallingConvention calling_convention;
1627 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1628 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1629 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1630 locations->SetOut(Location::RegisterLocation(EAX));
1631}
1632
1633void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) {
Roland Levillaincc3839c2016-02-29 16:23:48 +00001634 // No need to emit code checking whether `locations->InAt(2)` is a null
1635 // pointer, as callers of the native method
1636 //
1637 // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
1638 //
1639 // all include a null check on `data` before calling that method.
Serban Constantinescuba45db02016-07-12 22:53:02 +01001640 codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001641 CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001642}
1643
1644void IntrinsicLocationsBuilderX86::VisitStringNewStringFromString(HInvoke* invoke) {
1645 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001646 LocationSummary::kCallOnMainAndSlowPath,
Jeff Hao848f70a2014-01-15 13:49:50 -08001647 kIntrinsified);
1648 InvokeRuntimeCallingConvention calling_convention;
1649 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1650 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001651}
1652
1653void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke) {
1654 X86Assembler* assembler = GetAssembler();
1655 LocationSummary* locations = invoke->GetLocations();
1656
1657 Register string_to_copy = locations->InAt(0).AsRegister<Register>();
1658 __ testl(string_to_copy, string_to_copy);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001659 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001660 codegen_->AddSlowPath(slow_path);
1661 __ j(kEqual, slow_path->GetEntryLabel());
1662
Serban Constantinescuba45db02016-07-12 22:53:02 +01001663 codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001664 CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001665 __ Bind(slow_path->GetExitLabel());
1666}
1667
Mark Mendell8f8926a2015-08-17 11:39:06 -04001668void IntrinsicLocationsBuilderX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1669 // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
1670 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1671 LocationSummary::kNoCall,
1672 kIntrinsified);
1673 locations->SetInAt(0, Location::RequiresRegister());
1674 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
1675 // Place srcEnd in ECX to save a move below.
1676 locations->SetInAt(2, Location::RegisterLocation(ECX));
1677 locations->SetInAt(3, Location::RequiresRegister());
1678 locations->SetInAt(4, Location::RequiresRegister());
1679
1680 // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
1681 // We don't have enough registers to also grab ECX, so handle below.
1682 locations->AddTemp(Location::RegisterLocation(ESI));
1683 locations->AddTemp(Location::RegisterLocation(EDI));
1684}
1685
1686void IntrinsicCodeGeneratorX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1687 X86Assembler* assembler = GetAssembler();
1688 LocationSummary* locations = invoke->GetLocations();
1689
1690 size_t char_component_size = Primitive::ComponentSize(Primitive::kPrimChar);
1691 // Location of data in char array buffer.
1692 const uint32_t data_offset = mirror::Array::DataOffset(char_component_size).Uint32Value();
1693 // Location of char array data in string.
1694 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1695
1696 // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
1697 Register obj = locations->InAt(0).AsRegister<Register>();
1698 Location srcBegin = locations->InAt(1);
1699 int srcBegin_value =
1700 srcBegin.IsConstant() ? srcBegin.GetConstant()->AsIntConstant()->GetValue() : 0;
1701 Register srcEnd = locations->InAt(2).AsRegister<Register>();
1702 Register dst = locations->InAt(3).AsRegister<Register>();
1703 Register dstBegin = locations->InAt(4).AsRegister<Register>();
1704
1705 // Check assumption that sizeof(Char) is 2 (used in scaling below).
1706 const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
1707 DCHECK_EQ(char_size, 2u);
1708
1709 // Compute the address of the destination buffer.
1710 __ leal(EDI, Address(dst, dstBegin, ScaleFactor::TIMES_2, data_offset));
1711
1712 // Compute the address of the source string.
1713 if (srcBegin.IsConstant()) {
1714 // Compute the address of the source string by adding the number of chars from
1715 // the source beginning to the value offset of a string.
1716 __ leal(ESI, Address(obj, srcBegin_value * char_size + value_offset));
1717 } else {
1718 __ leal(ESI, Address(obj, srcBegin.AsRegister<Register>(),
1719 ScaleFactor::TIMES_2, value_offset));
1720 }
1721
1722 // Compute the number of chars (words) to move.
1723 // Now is the time to save ECX, since we don't know if it will be used later.
1724 __ pushl(ECX);
1725 int stack_adjust = kX86WordSize;
1726 __ cfi().AdjustCFAOffset(stack_adjust);
1727 DCHECK_EQ(srcEnd, ECX);
1728 if (srcBegin.IsConstant()) {
1729 if (srcBegin_value != 0) {
1730 __ subl(ECX, Immediate(srcBegin_value));
1731 }
1732 } else {
1733 DCHECK(srcBegin.IsRegister());
1734 __ subl(ECX, srcBegin.AsRegister<Register>());
1735 }
1736
1737 // Do the move.
1738 __ rep_movsw();
1739
1740 // And restore ECX.
1741 __ popl(ECX);
1742 __ cfi().AdjustCFAOffset(-stack_adjust);
1743}
1744
Mark Mendell09ed1a32015-03-25 08:30:06 -04001745static void GenPeek(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
1746 Register address = locations->InAt(0).AsRegisterPairLow<Register>();
1747 Location out_loc = locations->Out();
1748 // x86 allows unaligned access. We do not have to check the input or use specific instructions
1749 // to avoid a SIGBUS.
1750 switch (size) {
1751 case Primitive::kPrimByte:
1752 __ movsxb(out_loc.AsRegister<Register>(), Address(address, 0));
1753 break;
1754 case Primitive::kPrimShort:
1755 __ movsxw(out_loc.AsRegister<Register>(), Address(address, 0));
1756 break;
1757 case Primitive::kPrimInt:
1758 __ movl(out_loc.AsRegister<Register>(), Address(address, 0));
1759 break;
1760 case Primitive::kPrimLong:
1761 __ movl(out_loc.AsRegisterPairLow<Register>(), Address(address, 0));
1762 __ movl(out_loc.AsRegisterPairHigh<Register>(), Address(address, 4));
1763 break;
1764 default:
1765 LOG(FATAL) << "Type not recognized for peek: " << size;
1766 UNREACHABLE();
1767 }
1768}
1769
1770void IntrinsicLocationsBuilderX86::VisitMemoryPeekByte(HInvoke* invoke) {
1771 CreateLongToIntLocations(arena_, invoke);
1772}
1773
1774void IntrinsicCodeGeneratorX86::VisitMemoryPeekByte(HInvoke* invoke) {
1775 GenPeek(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
1776}
1777
1778void IntrinsicLocationsBuilderX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
1779 CreateLongToIntLocations(arena_, invoke);
1780}
1781
1782void IntrinsicCodeGeneratorX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
1783 GenPeek(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
1784}
1785
1786void IntrinsicLocationsBuilderX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
1787 CreateLongToLongLocations(arena_, invoke);
1788}
1789
1790void IntrinsicCodeGeneratorX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
1791 GenPeek(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
1792}
1793
1794void IntrinsicLocationsBuilderX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
1795 CreateLongToIntLocations(arena_, invoke);
1796}
1797
1798void IntrinsicCodeGeneratorX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
1799 GenPeek(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
1800}
1801
1802static void CreateLongIntToVoidLocations(ArenaAllocator* arena, Primitive::Type size,
1803 HInvoke* invoke) {
1804 LocationSummary* locations = new (arena) LocationSummary(invoke,
1805 LocationSummary::kNoCall,
1806 kIntrinsified);
1807 locations->SetInAt(0, Location::RequiresRegister());
Roland Levillain4c0eb422015-04-24 16:43:49 +01001808 HInstruction* value = invoke->InputAt(1);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001809 if (size == Primitive::kPrimByte) {
1810 locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value));
1811 } else {
1812 locations->SetInAt(1, Location::RegisterOrConstant(value));
1813 }
1814}
1815
1816static void GenPoke(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
1817 Register address = locations->InAt(0).AsRegisterPairLow<Register>();
1818 Location value_loc = locations->InAt(1);
1819 // x86 allows unaligned access. We do not have to check the input or use specific instructions
1820 // to avoid a SIGBUS.
1821 switch (size) {
1822 case Primitive::kPrimByte:
1823 if (value_loc.IsConstant()) {
1824 __ movb(Address(address, 0),
1825 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1826 } else {
1827 __ movb(Address(address, 0), value_loc.AsRegister<ByteRegister>());
1828 }
1829 break;
1830 case Primitive::kPrimShort:
1831 if (value_loc.IsConstant()) {
1832 __ movw(Address(address, 0),
1833 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1834 } else {
1835 __ movw(Address(address, 0), value_loc.AsRegister<Register>());
1836 }
1837 break;
1838 case Primitive::kPrimInt:
1839 if (value_loc.IsConstant()) {
1840 __ movl(Address(address, 0),
1841 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1842 } else {
1843 __ movl(Address(address, 0), value_loc.AsRegister<Register>());
1844 }
1845 break;
1846 case Primitive::kPrimLong:
1847 if (value_loc.IsConstant()) {
1848 int64_t value = value_loc.GetConstant()->AsLongConstant()->GetValue();
1849 __ movl(Address(address, 0), Immediate(Low32Bits(value)));
1850 __ movl(Address(address, 4), Immediate(High32Bits(value)));
1851 } else {
1852 __ movl(Address(address, 0), value_loc.AsRegisterPairLow<Register>());
1853 __ movl(Address(address, 4), value_loc.AsRegisterPairHigh<Register>());
1854 }
1855 break;
1856 default:
1857 LOG(FATAL) << "Type not recognized for poke: " << size;
1858 UNREACHABLE();
1859 }
1860}
1861
1862void IntrinsicLocationsBuilderX86::VisitMemoryPokeByte(HInvoke* invoke) {
1863 CreateLongIntToVoidLocations(arena_, Primitive::kPrimByte, invoke);
1864}
1865
1866void IntrinsicCodeGeneratorX86::VisitMemoryPokeByte(HInvoke* invoke) {
1867 GenPoke(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
1868}
1869
1870void IntrinsicLocationsBuilderX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
1871 CreateLongIntToVoidLocations(arena_, Primitive::kPrimInt, invoke);
1872}
1873
1874void IntrinsicCodeGeneratorX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
1875 GenPoke(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
1876}
1877
1878void IntrinsicLocationsBuilderX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
1879 CreateLongIntToVoidLocations(arena_, Primitive::kPrimLong, invoke);
1880}
1881
1882void IntrinsicCodeGeneratorX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
1883 GenPoke(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
1884}
1885
1886void IntrinsicLocationsBuilderX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
1887 CreateLongIntToVoidLocations(arena_, Primitive::kPrimShort, invoke);
1888}
1889
1890void IntrinsicCodeGeneratorX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
1891 GenPoke(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
1892}
1893
1894void IntrinsicLocationsBuilderX86::VisitThreadCurrentThread(HInvoke* invoke) {
1895 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1896 LocationSummary::kNoCall,
1897 kIntrinsified);
1898 locations->SetOut(Location::RequiresRegister());
1899}
1900
1901void IntrinsicCodeGeneratorX86::VisitThreadCurrentThread(HInvoke* invoke) {
1902 Register out = invoke->GetLocations()->Out().AsRegister<Register>();
Andreas Gampe542451c2016-07-26 09:02:02 -07001903 GetAssembler()->fs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86PointerSize>()));
Mark Mendell09ed1a32015-03-25 08:30:06 -04001904}
1905
Roland Levillain0d5a2812015-11-13 10:07:31 +00001906static void GenUnsafeGet(HInvoke* invoke,
1907 Primitive::Type type,
1908 bool is_volatile,
1909 CodeGeneratorX86* codegen) {
1910 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
1911 LocationSummary* locations = invoke->GetLocations();
1912 Location base_loc = locations->InAt(1);
1913 Register base = base_loc.AsRegister<Register>();
1914 Location offset_loc = locations->InAt(2);
1915 Register offset = offset_loc.AsRegisterPairLow<Register>();
1916 Location output_loc = locations->Out();
Mark Mendell09ed1a32015-03-25 08:30:06 -04001917
1918 switch (type) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00001919 case Primitive::kPrimInt: {
Roland Levillain0d5a2812015-11-13 10:07:31 +00001920 Register output = output_loc.AsRegister<Register>();
1921 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
Roland Levillain7c1559a2015-12-15 10:55:36 +00001922 break;
1923 }
1924
1925 case Primitive::kPrimNot: {
1926 Register output = output_loc.AsRegister<Register>();
1927 if (kEmitCompilerReadBarrier) {
1928 if (kUseBakerReadBarrier) {
Sang, Chunlei0fcd2b82016-04-05 17:12:59 +08001929 Address src(base, offset, ScaleFactor::TIMES_1, 0);
1930 codegen->GenerateReferenceLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00001931 invoke, output_loc, base, src, /* needs_null_check */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00001932 } else {
1933 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
1934 codegen->GenerateReadBarrierSlow(
1935 invoke, output_loc, output_loc, base_loc, 0U, offset_loc);
1936 }
1937 } else {
1938 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
1939 __ MaybeUnpoisonHeapReference(output);
Roland Levillain4d027112015-07-01 15:41:14 +01001940 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04001941 break;
Roland Levillain4d027112015-07-01 15:41:14 +01001942 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04001943
1944 case Primitive::kPrimLong: {
Roland Levillain0d5a2812015-11-13 10:07:31 +00001945 Register output_lo = output_loc.AsRegisterPairLow<Register>();
1946 Register output_hi = output_loc.AsRegisterPairHigh<Register>();
Mark Mendell09ed1a32015-03-25 08:30:06 -04001947 if (is_volatile) {
1948 // Need to use a XMM to read atomically.
1949 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
1950 __ movsd(temp, Address(base, offset, ScaleFactor::TIMES_1, 0));
1951 __ movd(output_lo, temp);
1952 __ psrlq(temp, Immediate(32));
1953 __ movd(output_hi, temp);
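        // Editor note: two separate 32-bit loads could observe a torn value if another thread
        // performs a concurrent 64-bit store, so the volatile path reads all 8 bytes at once with
        // MOVSD, then extracts the low word with MOVD and shifts the high word down with PSRLQ
        // before the second MOVD.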
1954 } else {
1955 __ movl(output_lo, Address(base, offset, ScaleFactor::TIMES_1, 0));
1956 __ movl(output_hi, Address(base, offset, ScaleFactor::TIMES_1, 4));
1957 }
1958 }
1959 break;
1960
1961 default:
1962 LOG(FATAL) << "Unsupported op size " << type;
1963 UNREACHABLE();
1964 }
1965}
1966
Roland Levillain7c1559a2015-12-15 10:55:36 +00001967static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
1968 HInvoke* invoke,
1969 Primitive::Type type,
1970 bool is_volatile) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00001971 bool can_call = kEmitCompilerReadBarrier &&
1972 (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
1973 invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001974 LocationSummary* locations = new (arena) LocationSummary(invoke,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001975 can_call ?
1976 LocationSummary::kCallOnSlowPath :
1977 LocationSummary::kNoCall,
Mark Mendell09ed1a32015-03-25 08:30:06 -04001978 kIntrinsified);
1979 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1980 locations->SetInAt(1, Location::RequiresRegister());
1981 locations->SetInAt(2, Location::RequiresRegister());
Roland Levillain7c1559a2015-12-15 10:55:36 +00001982 if (type == Primitive::kPrimLong) {
Mark Mendell09ed1a32015-03-25 08:30:06 -04001983 if (is_volatile) {
1984 // Need to use XMM to read volatile.
1985 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain3d312422016-06-23 13:53:42 +01001986 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001987 } else {
1988 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1989 }
1990 } else {
Roland Levillain3d312422016-06-23 13:53:42 +01001991 locations->SetOut(Location::RequiresRegister(),
1992 can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001993 }
1994}
1995
1996void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00001997 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001998}
1999void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002000 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002001}
2002void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002003 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002004}
2005void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002006 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002007}
2008void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002009 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002010}
2011void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002012 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002013}
2014
2015
2016void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002017 GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002018}
2019void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002020 GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002021}
2022void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002023 GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002024}
2025void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002026 GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002027}
2028void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002029 GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002030}
2031void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002032 GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002033}
2034
2035
2036static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* arena,
2037 Primitive::Type type,
2038 HInvoke* invoke,
2039 bool is_volatile) {
2040 LocationSummary* locations = new (arena) LocationSummary(invoke,
2041 LocationSummary::kNoCall,
2042 kIntrinsified);
2043 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
2044 locations->SetInAt(1, Location::RequiresRegister());
2045 locations->SetInAt(2, Location::RequiresRegister());
2046 locations->SetInAt(3, Location::RequiresRegister());
2047 if (type == Primitive::kPrimNot) {
2048 // Need temp registers for card-marking.
Roland Levillain4d027112015-07-01 15:41:14 +01002049 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Mark Mendell09ed1a32015-03-25 08:30:06 -04002050 // Ensure the value is in a byte register.
2051 locations->AddTemp(Location::RegisterLocation(ECX));
2052 } else if (type == Primitive::kPrimLong && is_volatile) {
2053 locations->AddTemp(Location::RequiresFpuRegister());
2054 locations->AddTemp(Location::RequiresFpuRegister());
2055 }
2056}
2057
2058void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002059 CreateIntIntIntIntToVoidPlusTempsLocations(
2060 arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002061}
2062void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002063 CreateIntIntIntIntToVoidPlusTempsLocations(
2064 arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002065}
2066void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002067 CreateIntIntIntIntToVoidPlusTempsLocations(
2068 arena_, Primitive::kPrimInt, invoke, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002069}
2070void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002071 CreateIntIntIntIntToVoidPlusTempsLocations(
2072 arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002073}
2074void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002075 CreateIntIntIntIntToVoidPlusTempsLocations(
2076 arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002077}
2078void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002079 CreateIntIntIntIntToVoidPlusTempsLocations(
2080 arena_, Primitive::kPrimNot, invoke, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002081}
2082void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002083 CreateIntIntIntIntToVoidPlusTempsLocations(
2084 arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002085}
2086void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002087 CreateIntIntIntIntToVoidPlusTempsLocations(
2088 arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002089}
2090void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002091 CreateIntIntIntIntToVoidPlusTempsLocations(
2092 arena_, Primitive::kPrimLong, invoke, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002093}
2094
2095// We don't care for ordered: it requires an AnyStore barrier, which is already given by the x86
2096// memory model.
2097static void GenUnsafePut(LocationSummary* locations,
2098 Primitive::Type type,
2099 bool is_volatile,
2100 CodeGeneratorX86* codegen) {
Roland Levillainb488b782015-10-22 11:38:49 +01002101 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04002102 Register base = locations->InAt(1).AsRegister<Register>();
2103 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
2104 Location value_loc = locations->InAt(3);
2105
2106 if (type == Primitive::kPrimLong) {
2107 Register value_lo = value_loc.AsRegisterPairLow<Register>();
2108 Register value_hi = value_loc.AsRegisterPairHigh<Register>();
2109 if (is_volatile) {
2110 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
2111 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
2112 __ movd(temp1, value_lo);
2113 __ movd(temp2, value_hi);
2114 __ punpckldq(temp1, temp2);
2115 __ movsd(Address(base, offset, ScaleFactor::TIMES_1, 0), temp1);
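      // Editor note: PUNPCKLDQ interleaves the low dwords of temp1 (value_lo) and temp2
      // (value_hi), so temp1 now holds the full 64-bit value and the MOVSD above performs one
      // atomic 8-byte store instead of two independently visible 32-bit stores.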
2116 } else {
2117 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
2118 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
2119 }
Roland Levillain4d027112015-07-01 15:41:14 +01002120 } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
2121 Register temp = locations->GetTemp(0).AsRegister<Register>();
2122 __ movl(temp, value_loc.AsRegister<Register>());
2123 __ PoisonHeapReference(temp);
2124 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002125 } else {
2126 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
2127 }
2128
2129 if (is_volatile) {
Mark P Mendell17077d82015-12-16 19:15:59 +00002130 codegen->MemoryFence();
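    // Editor note (assumption about MemoryFence): the only ordering a volatile store still needs
    // on x86 is a StoreLoad barrier, so MemoryFence() is presumably emitting an MFENCE or an
    // equivalent LOCK-prefixed instruction here.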
Mark Mendell09ed1a32015-03-25 08:30:06 -04002131 }
2132
2133 if (type == Primitive::kPrimNot) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002134 bool value_can_be_null = true; // TODO: Worth finding out this information?
Mark Mendell09ed1a32015-03-25 08:30:06 -04002135 codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
2136 locations->GetTemp(1).AsRegister<Register>(),
2137 base,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002138 value_loc.AsRegister<Register>(),
2139 value_can_be_null);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002140 }
2141}
2142
2143void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002144 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002145}
2146void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002147 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002148}
2149void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002150 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002151}
2152void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002153 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002154}
2155void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002156 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002157}
2158void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002159 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002160}
2161void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002162 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002163}
2164void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002165 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002166}
2167void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002168 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002169}
2170
Mark Mendell58d25fd2015-04-03 14:52:31 -04002171static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, Primitive::Type type,
2172 HInvoke* invoke) {
2173 LocationSummary* locations = new (arena) LocationSummary(invoke,
2174 LocationSummary::kNoCall,
2175 kIntrinsified);
2176 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
2177 locations->SetInAt(1, Location::RequiresRegister());
2178 // Offset is a long, but in 32 bit mode, we only need the low word.
2179 // Can we update the invoke here to remove a TypeConvert to Long?
2180 locations->SetInAt(2, Location::RequiresRegister());
2181 // Expected value must be in EAX or EDX:EAX.
2182 // For long, new value must be in ECX:EBX.
2183 if (type == Primitive::kPrimLong) {
2184 locations->SetInAt(3, Location::RegisterPairLocation(EAX, EDX));
2185 locations->SetInAt(4, Location::RegisterPairLocation(EBX, ECX));
2186 } else {
2187 locations->SetInAt(3, Location::RegisterLocation(EAX));
2188 locations->SetInAt(4, Location::RequiresRegister());
2189 }
2190
2191 // Force a byte register for the output.
2192 locations->SetOut(Location::RegisterLocation(EAX));
2193 if (type == Primitive::kPrimNot) {
2194 // Need temp registers for card-marking.
Roland Levillainb488b782015-10-22 11:38:49 +01002195 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Mark Mendell58d25fd2015-04-03 14:52:31 -04002196 // Need a byte register for marking.
2197 locations->AddTemp(Location::RegisterLocation(ECX));
2198 }
2199}
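// Editor note (background): the fixed registers above mirror the implicit operands of the x86
// compare-and-exchange instructions: CMPXCHG compares EAX with its memory operand, while
// CMPXCHG8B compares EDX:EAX with the 64-bit memory operand and, on success, stores ECX:EBX.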
2200
2201void IntrinsicLocationsBuilderX86::VisitUnsafeCASInt(HInvoke* invoke) {
2202 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimInt, invoke);
2203}
2204
2205void IntrinsicLocationsBuilderX86::VisitUnsafeCASLong(HInvoke* invoke) {
2206 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimLong, invoke);
2207}
2208
2209void IntrinsicLocationsBuilderX86::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillain391b8662015-12-18 11:43:38 +00002210 // The UnsafeCASObject intrinsic is missing a read barrier, and
2211 // therefore sometimes does not work as expected (b/25883050).
2212 // Turn it off temporarily as a quick fix, until the read barrier is
Roland Levillain3d312422016-06-23 13:53:42 +01002213 // implemented (see TODO in GenCAS).
Roland Levillain391b8662015-12-18 11:43:38 +00002214 //
Roland Levillain3d312422016-06-23 13:53:42 +01002215 // TODO(rpl): Implement read barrier support in GenCAS and re-enable
Roland Levillain391b8662015-12-18 11:43:38 +00002216 // this intrinsic.
2217 if (kEmitCompilerReadBarrier) {
2218 return;
2219 }
2220
Mark Mendell58d25fd2015-04-03 14:52:31 -04002221 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimNot, invoke);
2222}
2223
2224static void GenCAS(Primitive::Type type, HInvoke* invoke, CodeGeneratorX86* codegen) {
Roland Levillainb488b782015-10-22 11:38:49 +01002225 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
Mark Mendell58d25fd2015-04-03 14:52:31 -04002226 LocationSummary* locations = invoke->GetLocations();
2227
2228 Register base = locations->InAt(1).AsRegister<Register>();
2229 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
2230 Location out = locations->Out();
2231 DCHECK_EQ(out.AsRegister<Register>(), EAX);
2232
Roland Levillainb488b782015-10-22 11:38:49 +01002233 if (type == Primitive::kPrimNot) {
Roland Levillain4d027112015-07-01 15:41:14 +01002234 Register expected = locations->InAt(3).AsRegister<Register>();
Roland Levillainb488b782015-10-22 11:38:49 +01002235 // Ensure `expected` is in EAX (required by the CMPXCHG instruction).
Roland Levillain4d027112015-07-01 15:41:14 +01002236 DCHECK_EQ(expected, EAX);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002237 Register value = locations->InAt(4).AsRegister<Register>();
Roland Levillain4d027112015-07-01 15:41:14 +01002238
Roland Levillainb488b782015-10-22 11:38:49 +01002239 // Mark card for object assuming new value is stored.
2240 bool value_can_be_null = true; // TODO: Worth finding out this information?
2241 codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
2242 locations->GetTemp(1).AsRegister<Register>(),
2243 base,
2244 value,
2245 value_can_be_null);
2246
2247 bool base_equals_value = (base == value);
2248 if (kPoisonHeapReferences) {
2249 if (base_equals_value) {
2250 // If `base` and `value` are the same register location, move
2251 // `value` to a temporary register. This way, poisoning
2252 // `value` won't invalidate `base`.
2253 value = locations->GetTemp(0).AsRegister<Register>();
2254 __ movl(value, base);
Roland Levillain4d027112015-07-01 15:41:14 +01002255 }
Roland Levillainb488b782015-10-22 11:38:49 +01002256
2257 // Check that the register allocator did not assign the location
2258 // of `expected` (EAX) to `value` nor to `base`, so that heap
2259 // poisoning (when enabled) works as intended below.
2260 // - If `value` were equal to `expected`, both references would
2261 // be poisoned twice, meaning they would not be poisoned at
2262 // all, as heap poisoning uses address negation.
2263 // - If `base` were equal to `expected`, poisoning `expected`
2264 // would invalidate `base`.
2265 DCHECK_NE(value, expected);
2266 DCHECK_NE(base, expected);
2267
2268 __ PoisonHeapReference(expected);
2269 __ PoisonHeapReference(value);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002270 }
2271
Roland Levillain391b8662015-12-18 11:43:38 +00002272 // TODO: Add a read barrier for the reference stored in the object
2273 // before attempting the CAS, similar to the one in the
2274 // art::Unsafe_compareAndSwapObject JNI implementation.
2275 //
2276 // Note that this code is not (yet) used when read barriers are
2277 // enabled (see IntrinsicLocationsBuilderX86::VisitUnsafeCASObject).
2278 DCHECK(!kEmitCompilerReadBarrier);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002279 __ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);
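    // Editor note: LOCK CMPXCHG compares EAX (`expected`) with the memory operand; if they are
    // equal it stores `value` and sets ZF, otherwise it loads the current memory value into EAX
    // and clears ZF. The SETB(kZero)/MOVZXB sequence below turns ZF into the boolean result.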
Mark Mendell58d25fd2015-04-03 14:52:31 -04002280
Roland Levillain0d5a2812015-11-13 10:07:31 +00002281 // LOCK CMPXCHG has full barrier semantics, and we don't need
Roland Levillainb488b782015-10-22 11:38:49 +01002282 // scheduling barriers at this time.
Mark Mendell58d25fd2015-04-03 14:52:31 -04002283
Roland Levillainb488b782015-10-22 11:38:49 +01002284 // Convert ZF into the boolean result.
2285 __ setb(kZero, out.AsRegister<Register>());
2286 __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
Roland Levillain4d027112015-07-01 15:41:14 +01002287
Roland Levillain391b8662015-12-18 11:43:38 +00002288 // If heap poisoning is enabled, we need to unpoison the values
2289 // that were poisoned earlier.
Roland Levillainb488b782015-10-22 11:38:49 +01002290 if (kPoisonHeapReferences) {
2291 if (base_equals_value) {
2292 // `value` has been moved to a temporary register, no need to
2293 // unpoison it.
2294 } else {
2295 // Ensure `value` is different from `out`, so that unpoisoning
2296 // the former does not invalidate the latter.
2297 DCHECK_NE(value, out.AsRegister<Register>());
2298 __ UnpoisonHeapReference(value);
2299 }
2300 // Do not unpoison the reference contained in register
2301 // `expected`, as it is the same as register `out` (EAX).
2302 }
2303 } else {
2304 if (type == Primitive::kPrimInt) {
2305 // Ensure the expected value is in EAX (required by the CMPXCHG
2306 // instruction).
2307 DCHECK_EQ(locations->InAt(3).AsRegister<Register>(), EAX);
2308 __ LockCmpxchgl(Address(base, offset, TIMES_1, 0),
2309 locations->InAt(4).AsRegister<Register>());
2310 } else if (type == Primitive::kPrimLong) {
2311 // Ensure the expected value is in EAX:EDX and that the new
2312 // value is in EBX:ECX (required by the CMPXCHG8B instruction).
2313 DCHECK_EQ(locations->InAt(3).AsRegisterPairLow<Register>(), EAX);
2314 DCHECK_EQ(locations->InAt(3).AsRegisterPairHigh<Register>(), EDX);
2315 DCHECK_EQ(locations->InAt(4).AsRegisterPairLow<Register>(), EBX);
2316 DCHECK_EQ(locations->InAt(4).AsRegisterPairHigh<Register>(), ECX);
2317 __ LockCmpxchg8b(Address(base, offset, TIMES_1, 0));
2318 } else {
2319 LOG(FATAL) << "Unexpected CAS type " << type;
2320 }
2321
Roland Levillain0d5a2812015-11-13 10:07:31 +00002322 // LOCK CMPXCHG/LOCK CMPXCHG8B have full barrier semantics, and we
2323 // don't need scheduling barriers at this time.
Roland Levillainb488b782015-10-22 11:38:49 +01002324
2325 // Convert ZF into the boolean result.
2326 __ setb(kZero, out.AsRegister<Register>());
2327 __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
Roland Levillain4d027112015-07-01 15:41:14 +01002328 }
Mark Mendell58d25fd2015-04-03 14:52:31 -04002329}
2330
2331void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
2332 GenCAS(Primitive::kPrimInt, invoke, codegen_);
2333}
2334
2335void IntrinsicCodeGeneratorX86::VisitUnsafeCASLong(HInvoke* invoke) {
2336 GenCAS(Primitive::kPrimLong, invoke, codegen_);
2337}
2338
2339void IntrinsicCodeGeneratorX86::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillain3d312422016-06-23 13:53:42 +01002340 // The UnsafeCASObject intrinsic is missing a read barrier, and
2341 // therefore sometimes does not work as expected (b/25883050).
2342 // Turn it off temporarily as a quick fix, until the read barrier is
2343 // implemented (see TODO in GenCAS).
2344 //
2345 // TODO(rpl): Implement read barrier support in GenCAS and re-enable
2346 // this intrinsic.
2347 DCHECK(!kEmitCompilerReadBarrier);
2348
Mark Mendell58d25fd2015-04-03 14:52:31 -04002349 GenCAS(Primitive::kPrimNot, invoke, codegen_);
2350}
2351
2352void IntrinsicLocationsBuilderX86::VisitIntegerReverse(HInvoke* invoke) {
2353 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2354 LocationSummary::kNoCall,
2355 kIntrinsified);
2356 locations->SetInAt(0, Location::RequiresRegister());
2357 locations->SetOut(Location::SameAsFirstInput());
2358 locations->AddTemp(Location::RequiresRegister());
2359}
2360
2361static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask,
2362 X86Assembler* assembler) {
2363 Immediate imm_shift(shift);
2364 Immediate imm_mask(mask);
2365 __ movl(temp, reg);
2366 __ shrl(reg, imm_shift);
2367 __ andl(temp, imm_mask);
2368 __ andl(reg, imm_mask);
2369 __ shll(temp, imm_shift);
2370 __ orl(reg, temp);
2371}
2372
2373void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002374 X86Assembler* assembler = GetAssembler();
Mark Mendell58d25fd2015-04-03 14:52:31 -04002375 LocationSummary* locations = invoke->GetLocations();
2376
2377 Register reg = locations->InAt(0).AsRegister<Register>();
2378 Register temp = locations->GetTemp(0).AsRegister<Register>();
2379
2380 /*
2381 * Use one bswap instruction to reverse byte order first and then use 3 rounds of
2382 * swapping bits to reverse bits in a number x. Using bswap to save instructions
2383 * compared to generic luni implementation which has 5 rounds of swapping bits.
2384 * x = bswap x
2385 * x = (x & 0x55555555) << 1 | (x >> 1) & 0x55555555;
2386 * x = (x & 0x33333333) << 2 | (x >> 2) & 0x33333333;
2387 * x = (x & 0x0F0F0F0F) << 4 | (x >> 4) & 0x0F0F0F0F;
2388 */
2389 __ bswapl(reg);
2390 SwapBits(reg, temp, 1, 0x55555555, assembler);
2391 SwapBits(reg, temp, 2, 0x33333333, assembler);
2392 SwapBits(reg, temp, 4, 0x0f0f0f0f, assembler);
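  // Worked example (editor note): reversing 0x00000001. BSWAP yields 0x01000000; swapping
  // adjacent bits gives 0x02000000, swapping bit pairs gives 0x08000000, and swapping nibbles
  // gives 0x80000000 == Integer.reverse(1).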
2393}
2394
2395void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) {
2396 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2397 LocationSummary::kNoCall,
2398 kIntrinsified);
2399 locations->SetInAt(0, Location::RequiresRegister());
2400 locations->SetOut(Location::SameAsFirstInput());
2401 locations->AddTemp(Location::RequiresRegister());
2402}
2403
2404void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002405 X86Assembler* assembler = GetAssembler();
Mark Mendell58d25fd2015-04-03 14:52:31 -04002406 LocationSummary* locations = invoke->GetLocations();
2407
2408 Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>();
2409 Register reg_high = locations->InAt(0).AsRegisterPairHigh<Register>();
2410 Register temp = locations->GetTemp(0).AsRegister<Register>();
2411
2412 // We want to swap high/low, then bswap each one, and then do the same
2413 // as a 32 bit reverse.
2414 // Exchange high and low.
2415 __ movl(temp, reg_low);
2416 __ movl(reg_low, reg_high);
2417 __ movl(reg_high, temp);
2418
2419 // bit-reverse low
2420 __ bswapl(reg_low);
2421 SwapBits(reg_low, temp, 1, 0x55555555, assembler);
2422 SwapBits(reg_low, temp, 2, 0x33333333, assembler);
2423 SwapBits(reg_low, temp, 4, 0x0f0f0f0f, assembler);
2424
2425 // bit-reverse high
2426 __ bswapl(reg_high);
2427 SwapBits(reg_high, temp, 1, 0x55555555, assembler);
2428 SwapBits(reg_high, temp, 2, 0x33333333, assembler);
2429 SwapBits(reg_high, temp, 4, 0x0f0f0f0f, assembler);
2430}
2431
Aart Bikc39dac12016-01-21 08:59:48 -08002432static void CreateBitCountLocations(
2433 ArenaAllocator* arena, CodeGeneratorX86* codegen, HInvoke* invoke, bool is_long) {
2434 if (!codegen->GetInstructionSetFeatures().HasPopCnt()) {
2435 // Do nothing if there is no popcnt support. This results in generating
2436 // a call for the intrinsic rather than direct code.
2437 return;
2438 }
2439 LocationSummary* locations = new (arena) LocationSummary(invoke,
2440 LocationSummary::kNoCall,
2441 kIntrinsified);
2442 if (is_long) {
Aart Bikc39dac12016-01-21 08:59:48 -08002443 locations->AddTemp(Location::RequiresRegister());
Aart Bikc39dac12016-01-21 08:59:48 -08002444 }
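  // `popcnt` accepts a memory source operand and constant inputs are folded in
  // GenBitCount below, so the input may live anywhere.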
Aart Bik2a946072016-01-21 12:49:00 -08002445 locations->SetInAt(0, Location::Any());
Aart Bikc39dac12016-01-21 08:59:48 -08002446 locations->SetOut(Location::RequiresRegister());
2447}
2448
Aart Bika19616e2016-02-01 18:57:58 -08002449static void GenBitCount(X86Assembler* assembler,
2450 CodeGeneratorX86* codegen,
2451 HInvoke* invoke, bool is_long) {
Aart Bikc39dac12016-01-21 08:59:48 -08002452 LocationSummary* locations = invoke->GetLocations();
2453 Location src = locations->InAt(0);
2454 Register out = locations->Out().AsRegister<Register>();
2455
2456 if (invoke->InputAt(0)->IsConstant()) {
2457 // Evaluate this at compile time.
2458 int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
Roland Levillainfa3912e2016-04-01 18:21:55 +01002459 int32_t result = is_long
Aart Bikc39dac12016-01-21 08:59:48 -08002460 ? POPCOUNT(static_cast<uint64_t>(value))
2461 : POPCOUNT(static_cast<uint32_t>(value));
Roland Levillainfa3912e2016-04-01 18:21:55 +01002462 codegen->Load32BitValue(out, result);
Aart Bikc39dac12016-01-21 08:59:48 -08002463 return;
2464 }
2465
2466 // Handle the non-constant cases.
2467 if (!is_long) {
2468 if (src.IsRegister()) {
2469 __ popcntl(out, src.AsRegister<Register>());
2470 } else {
2471 DCHECK(src.IsStackSlot());
2472 __ popcntl(out, Address(ESP, src.GetStackIndex()));
2473 }
Aart Bik2a946072016-01-21 12:49:00 -08002474 } else {
2475 // The 64-bit case needs to worry about two parts.
2476 Register temp = locations->GetTemp(0).AsRegister<Register>();
2477 if (src.IsRegisterPair()) {
2478 __ popcntl(temp, src.AsRegisterPairLow<Register>());
2479 __ popcntl(out, src.AsRegisterPairHigh<Register>());
2480 } else {
2481 DCHECK(src.IsDoubleStackSlot());
2482 __ popcntl(temp, Address(ESP, src.GetStackIndex()));
2483 __ popcntl(out, Address(ESP, src.GetHighStackIndex(kX86WordSize)));
2484 }
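    // The 64-bit population count is the sum of the two 32-bit counts (at most 64,
    // so the 32-bit addition cannot overflow).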
2485 __ addl(out, temp);
Aart Bikc39dac12016-01-21 08:59:48 -08002486 }
Aart Bikc39dac12016-01-21 08:59:48 -08002487}
2488
2489void IntrinsicLocationsBuilderX86::VisitIntegerBitCount(HInvoke* invoke) {
2490 CreateBitCountLocations(arena_, codegen_, invoke, /* is_long */ false);
2491}
2492
2493void IntrinsicCodeGeneratorX86::VisitIntegerBitCount(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002494 GenBitCount(GetAssembler(), codegen_, invoke, /* is_long */ false);
Aart Bikc39dac12016-01-21 08:59:48 -08002495}
2496
2497void IntrinsicLocationsBuilderX86::VisitLongBitCount(HInvoke* invoke) {
2498 CreateBitCountLocations(arena_, codegen_, invoke, /* is_long */ true);
2499}
2500
2501void IntrinsicCodeGeneratorX86::VisitLongBitCount(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002502 GenBitCount(GetAssembler(), codegen_, invoke, /* is_long */ true);
Aart Bikc39dac12016-01-21 08:59:48 -08002503}
2504
Mark Mendelld5897672015-08-12 21:16:41 -04002505static void CreateLeadingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
2506 LocationSummary* locations = new (arena) LocationSummary(invoke,
2507 LocationSummary::kNoCall,
2508 kIntrinsified);
2509 if (is_long) {
2510 locations->SetInAt(0, Location::RequiresRegister());
2511 } else {
2512 locations->SetInAt(0, Location::Any());
2513 }
2514 locations->SetOut(Location::RequiresRegister());
2515}
2516
Aart Bika19616e2016-02-01 18:57:58 -08002517static void GenLeadingZeros(X86Assembler* assembler,
2518 CodeGeneratorX86* codegen,
2519 HInvoke* invoke, bool is_long) {
Mark Mendelld5897672015-08-12 21:16:41 -04002520 LocationSummary* locations = invoke->GetLocations();
2521 Location src = locations->InAt(0);
2522 Register out = locations->Out().AsRegister<Register>();
2523
2524 if (invoke->InputAt(0)->IsConstant()) {
2525 // Evaluate this at compile time.
2526 int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
2527 if (value == 0) {
2528 value = is_long ? 64 : 32;
2529 } else {
2530 value = is_long ? CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value));
2531 }
Aart Bika19616e2016-02-01 18:57:58 -08002532 codegen->Load32BitValue(out, value);
Mark Mendelld5897672015-08-12 21:16:41 -04002533 return;
2534 }
2535
2536 // Handle the non-constant cases.
2537 if (!is_long) {
2538 if (src.IsRegister()) {
2539 __ bsrl(out, src.AsRegister<Register>());
2540 } else {
2541 DCHECK(src.IsStackSlot());
2542 __ bsrl(out, Address(ESP, src.GetStackIndex()));
2543 }
2544
2545 // BSR sets ZF if the input was zero, and the output is undefined.
Mark Mendell0c9497d2015-08-21 09:30:05 -04002546 NearLabel all_zeroes, done;
Mark Mendelld5897672015-08-12 21:16:41 -04002547 __ j(kEqual, &all_zeroes);
2548
2549 // Correct the result from BSR to get the final CLZ result.
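    // For a BSR result n in [0, 31], n ^ 31 == 31 - n, which is exactly the number
    // of leading zeros.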
2550 __ xorl(out, Immediate(31));
2551 __ jmp(&done);
2552
2553 // Fix the zero case with the expected result.
2554 __ Bind(&all_zeroes);
2555 __ movl(out, Immediate(32));
2556
2557 __ Bind(&done);
2558 return;
2559 }
2560
2561  // The 64-bit case needs to worry about both parts of the register.
2562 DCHECK(src.IsRegisterPair());
2563 Register src_lo = src.AsRegisterPairLow<Register>();
2564 Register src_hi = src.AsRegisterPairHigh<Register>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002565 NearLabel handle_low, done, all_zeroes;
Mark Mendelld5897672015-08-12 21:16:41 -04002566
2567 // Is the high word zero?
2568 __ testl(src_hi, src_hi);
2569 __ j(kEqual, &handle_low);
2570
2571 // High word is not zero. We know that the BSR result is defined in this case.
2572 __ bsrl(out, src_hi);
2573
2574 // Correct the result from BSR to get the final CLZ result.
2575 __ xorl(out, Immediate(31));
2576 __ jmp(&done);
2577
2578 // High word was zero. We have to compute the low word count and add 32.
2579 __ Bind(&handle_low);
2580 __ bsrl(out, src_lo);
2581 __ j(kEqual, &all_zeroes);
2582
2583 // We had a valid result. Use an XOR to both correct the result and add 32.
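  // For n in [0, 31], n ^ 63 == (31 - n) + 32, so a single XOR both corrects the BSR
  // result of the low word and accounts for the 32 zero bits of the high word.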
2584 __ xorl(out, Immediate(63));
2585 __ jmp(&done);
2586
2587 // All zero case.
2588 __ Bind(&all_zeroes);
2589 __ movl(out, Immediate(64));
2590
2591 __ Bind(&done);
2592}
2593
2594void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
2595 CreateLeadingZeroLocations(arena_, invoke, /* is_long */ false);
2596}
2597
2598void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002599 GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long */ false);
Mark Mendelld5897672015-08-12 21:16:41 -04002600}
2601
2602void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
2603 CreateLeadingZeroLocations(arena_, invoke, /* is_long */ true);
2604}
2605
2606void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002607 GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long */ true);
Mark Mendelld5897672015-08-12 21:16:41 -04002608}
2609
Mark Mendell2d554792015-09-15 21:45:18 -04002610static void CreateTrailingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
2611 LocationSummary* locations = new (arena) LocationSummary(invoke,
2612 LocationSummary::kNoCall,
2613 kIntrinsified);
2614 if (is_long) {
2615 locations->SetInAt(0, Location::RequiresRegister());
2616 } else {
2617 locations->SetInAt(0, Location::Any());
2618 }
2619 locations->SetOut(Location::RequiresRegister());
2620}
2621
Aart Bika19616e2016-02-01 18:57:58 -08002622static void GenTrailingZeros(X86Assembler* assembler,
2623 CodeGeneratorX86* codegen,
2624 HInvoke* invoke, bool is_long) {
Mark Mendell2d554792015-09-15 21:45:18 -04002625 LocationSummary* locations = invoke->GetLocations();
2626 Location src = locations->InAt(0);
2627 Register out = locations->Out().AsRegister<Register>();
2628
2629 if (invoke->InputAt(0)->IsConstant()) {
2630 // Evaluate this at compile time.
2631 int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
2632 if (value == 0) {
2633 value = is_long ? 64 : 32;
2634 } else {
2635 value = is_long ? CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value));
2636 }
Aart Bika19616e2016-02-01 18:57:58 -08002637 codegen->Load32BitValue(out, value);
Mark Mendell2d554792015-09-15 21:45:18 -04002638 return;
2639 }
2640
2641 // Handle the non-constant cases.
2642 if (!is_long) {
2643 if (src.IsRegister()) {
2644 __ bsfl(out, src.AsRegister<Register>());
2645 } else {
2646 DCHECK(src.IsStackSlot());
2647 __ bsfl(out, Address(ESP, src.GetStackIndex()));
2648 }
2649
2650 // BSF sets ZF if the input was zero, and the output is undefined.
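    // For a non-zero input, the BSF result (the index of the lowest set bit) is already
    // the trailing-zero count, so no correction is needed.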
2651 NearLabel done;
2652 __ j(kNotEqual, &done);
2653
2654 // Fix the zero case with the expected result.
2655 __ movl(out, Immediate(32));
2656
2657 __ Bind(&done);
2658 return;
2659 }
2660
2661  // The 64-bit case needs to worry about both parts of the register.
2662 DCHECK(src.IsRegisterPair());
2663 Register src_lo = src.AsRegisterPairLow<Register>();
2664 Register src_hi = src.AsRegisterPairHigh<Register>();
2665 NearLabel done, all_zeroes;
2666
2667 // If the low word is zero, then ZF will be set. If not, we have the answer.
2668 __ bsfl(out, src_lo);
2669 __ j(kNotEqual, &done);
2670
2671 // Low word was zero. We have to compute the high word count and add 32.
2672 __ bsfl(out, src_hi);
2673 __ j(kEqual, &all_zeroes);
2674
2675 // We had a valid result. Add 32 to account for the low word being zero.
2676 __ addl(out, Immediate(32));
2677 __ jmp(&done);
2678
2679 // All zero case.
2680 __ Bind(&all_zeroes);
2681 __ movl(out, Immediate(64));
2682
2683 __ Bind(&done);
2684}
2685
2686void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
2687 CreateTrailingZeroLocations(arena_, invoke, /* is_long */ false);
2688}
2689
2690void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002691 GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long */ false);
Mark Mendell2d554792015-09-15 21:45:18 -04002692}
2693
2694void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
2695 CreateTrailingZeroLocations(arena_, invoke, /* is_long */ true);
2696}
2697
2698void IntrinsicCodeGeneratorX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002699 GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long */ true);
Mark Mendell2d554792015-09-15 21:45:18 -04002700}
2701
Serguei Katkov288c7a82016-05-16 11:53:15 +06002702void IntrinsicLocationsBuilderX86::VisitReferenceGetReferent(HInvoke* invoke) {
2703 if (kEmitCompilerReadBarrier) {
2704 // Do not intrinsify this call with the read barrier configuration.
2705 return;
2706 }
2707 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2708 LocationSummary::kCallOnSlowPath,
2709 kIntrinsified);
2710 locations->SetInAt(0, Location::RequiresRegister());
2711 locations->SetOut(Location::SameAsFirstInput());
2712 locations->AddTemp(Location::RequiresRegister());
2713}
2714
2715void IntrinsicCodeGeneratorX86::VisitReferenceGetReferent(HInvoke* invoke) {
2716 DCHECK(!kEmitCompilerReadBarrier);
2717 LocationSummary* locations = invoke->GetLocations();
2718 X86Assembler* assembler = GetAssembler();
2719
2720 Register obj = locations->InAt(0).AsRegister<Register>();
2721 Register out = locations->Out().AsRegister<Register>();
2722
2723 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
2724 codegen_->AddSlowPath(slow_path);
2725
2726 // Load ArtMethod first.
2727 HInvokeStaticOrDirect* invoke_direct = invoke->AsInvokeStaticOrDirect();
2728 DCHECK(invoke_direct != nullptr);
2729 Location temp_loc = codegen_->GenerateCalleeMethodStaticOrDirectCall(
2730 invoke_direct, locations->GetTemp(0));
2731 DCHECK(temp_loc.Equals(locations->GetTemp(0)));
2732 Register temp = temp_loc.AsRegister<Register>();
2733
2734 // Now get declaring class.
2735 __ movl(temp, Address(temp, ArtMethod::DeclaringClassOffset().Int32Value()));
2736
2737 uint32_t slow_path_flag_offset = codegen_->GetReferenceSlowFlagOffset();
2738 uint32_t disable_flag_offset = codegen_->GetReferenceDisableFlagOffset();
2739 DCHECK_NE(slow_path_flag_offset, 0u);
2740 DCHECK_NE(disable_flag_offset, 0u);
2741 DCHECK_NE(slow_path_flag_offset, disable_flag_offset);
2742
2743  // Check the static flags that prevent us from using the intrinsic.
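  // When the two byte-sized flags are adjacent in memory, a single 16-bit compare
  // against zero tests both of them at once; otherwise two byte compares are needed.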
2744 if (slow_path_flag_offset == disable_flag_offset + 1) {
2745 __ cmpw(Address(temp, disable_flag_offset), Immediate(0));
2746 __ j(kNotEqual, slow_path->GetEntryLabel());
2747 } else {
2748 __ cmpb(Address(temp, disable_flag_offset), Immediate(0));
2749 __ j(kNotEqual, slow_path->GetEntryLabel());
2750 __ cmpb(Address(temp, slow_path_flag_offset), Immediate(0));
2751 __ j(kNotEqual, slow_path->GetEntryLabel());
2752 }
2753
2754 // Fast path.
2755 __ movl(out, Address(obj, mirror::Reference::ReferentOffset().Int32Value()));
2756 codegen_->MaybeRecordImplicitNullCheck(invoke);
2757 __ MaybeUnpoisonHeapReference(out);
2758 __ Bind(slow_path->GetExitLabel());
2759}
2760
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002761static bool IsSameInput(HInstruction* instruction, size_t input0, size_t input1) {
2762 return instruction->InputAt(input0) == instruction->InputAt(input1);
2763}
2764
2765void IntrinsicLocationsBuilderX86::VisitSystemArrayCopy(HInvoke* invoke) {
Roland Levillain0b671c02016-08-19 12:02:34 +01002766 // The only read barrier implementation supporting the
2767 // SystemArrayCopy intrinsic is the Baker-style read barriers.
2768 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002769 return;
2770 }
2771
2772 CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
2773 if (invoke->GetLocations() != nullptr) {
2774 // Need a byte register for marking.
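    // (On 32-bit x86 only EAX, EBX, ECX and EDX have byte-addressable forms, hence the
    // fixed ECX temporary.)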
2775 invoke->GetLocations()->SetTempAt(1, Location::RegisterLocation(ECX));
2776
2777 static constexpr size_t kSrc = 0;
2778 static constexpr size_t kSrcPos = 1;
2779 static constexpr size_t kDest = 2;
2780 static constexpr size_t kDestPos = 3;
2781 static constexpr size_t kLength = 4;
2782
2783 if (!invoke->InputAt(kSrcPos)->IsIntConstant() &&
2784 !invoke->InputAt(kDestPos)->IsIntConstant() &&
2785 !invoke->InputAt(kLength)->IsIntConstant()) {
2786 if (!IsSameInput(invoke, kSrcPos, kDestPos) &&
2787 !IsSameInput(invoke, kSrcPos, kLength) &&
2788 !IsSameInput(invoke, kDestPos, kLength) &&
2789 !IsSameInput(invoke, kSrc, kDest)) {
2790 // Not enough registers, make the length also take a stack slot.
2791 invoke->GetLocations()->SetInAt(kLength, Location::Any());
2792 }
2793 }
2794 }
2795}
2796
2797void IntrinsicCodeGeneratorX86::VisitSystemArrayCopy(HInvoke* invoke) {
Roland Levillain0b671c02016-08-19 12:02:34 +01002798 // The only read barrier implementation supporting the
2799 // SystemArrayCopy intrinsic is the Baker-style read barriers.
2800 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002801
2802 X86Assembler* assembler = GetAssembler();
2803 LocationSummary* locations = invoke->GetLocations();
2804
2805 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2806 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2807 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2808 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Roland Levillain0b671c02016-08-19 12:02:34 +01002809 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002810
2811 Register src = locations->InAt(0).AsRegister<Register>();
2812 Location src_pos = locations->InAt(1);
2813 Register dest = locations->InAt(2).AsRegister<Register>();
2814 Location dest_pos = locations->InAt(3);
Roland Levillain0b671c02016-08-19 12:02:34 +01002815 Location length_arg = locations->InAt(4);
2816 Location length = length_arg;
2817 Location temp1_loc = locations->GetTemp(0);
2818 Register temp1 = temp1_loc.AsRegister<Register>();
2819 Location temp2_loc = locations->GetTemp(1);
2820 Register temp2 = temp2_loc.AsRegister<Register>();
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002821
Roland Levillain0b671c02016-08-19 12:02:34 +01002822 SlowPathCode* intrinsic_slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
2823 codegen_->AddSlowPath(intrinsic_slow_path);
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002824
2825 NearLabel conditions_on_positions_validated;
2826 SystemArrayCopyOptimizations optimizations(invoke);
2827
2828  // If source and destination are the same, we go to the slow path when a forward
2829  // copy would not be safe (i.e. dest_pos > src_pos), since the fast path copies forwards.
2830 if (src_pos.IsConstant()) {
2831 int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
2832 if (dest_pos.IsConstant()) {
2833 int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
2834 if (optimizations.GetDestinationIsSource()) {
2835 // Checked when building locations.
2836 DCHECK_GE(src_pos_constant, dest_pos_constant);
2837 } else if (src_pos_constant < dest_pos_constant) {
2838 __ cmpl(src, dest);
Roland Levillain0b671c02016-08-19 12:02:34 +01002839 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002840 }
2841 } else {
2842 if (!optimizations.GetDestinationIsSource()) {
2843 __ cmpl(src, dest);
2844 __ j(kNotEqual, &conditions_on_positions_validated);
2845 }
2846 __ cmpl(dest_pos.AsRegister<Register>(), Immediate(src_pos_constant));
Roland Levillain0b671c02016-08-19 12:02:34 +01002847 __ j(kGreater, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002848 }
2849 } else {
2850 if (!optimizations.GetDestinationIsSource()) {
2851 __ cmpl(src, dest);
2852 __ j(kNotEqual, &conditions_on_positions_validated);
2853 }
2854 if (dest_pos.IsConstant()) {
2855 int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
2856 __ cmpl(src_pos.AsRegister<Register>(), Immediate(dest_pos_constant));
Roland Levillain0b671c02016-08-19 12:02:34 +01002857 __ j(kLess, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002858 } else {
2859 __ cmpl(src_pos.AsRegister<Register>(), dest_pos.AsRegister<Register>());
Roland Levillain0b671c02016-08-19 12:02:34 +01002860 __ j(kLess, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002861 }
2862 }
2863
2864 __ Bind(&conditions_on_positions_validated);
2865
2866 if (!optimizations.GetSourceIsNotNull()) {
2867 // Bail out if the source is null.
2868 __ testl(src, src);
Roland Levillain0b671c02016-08-19 12:02:34 +01002869 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002870 }
2871
2872 if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
2873 // Bail out if the destination is null.
2874 __ testl(dest, dest);
Roland Levillain0b671c02016-08-19 12:02:34 +01002875 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002876 }
2877
Roland Levillain0b671c02016-08-19 12:02:34 +01002878 Location temp3_loc = locations->GetTemp(2);
2879 Register temp3 = temp3_loc.AsRegister<Register>();
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002880 if (length.IsStackSlot()) {
2881 __ movl(temp3, Address(ESP, length.GetStackIndex()));
2882 length = Location::RegisterLocation(temp3);
2883 }
2884
2885 // If the length is negative, bail out.
2886 // We have already checked in the LocationsBuilder for the constant case.
2887 if (!length.IsConstant() &&
2888 !optimizations.GetCountIsSourceLength() &&
2889 !optimizations.GetCountIsDestinationLength()) {
2890 __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
Roland Levillain0b671c02016-08-19 12:02:34 +01002891 __ j(kLess, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002892 }
2893
2894 // Validity checks: source.
2895 CheckPosition(assembler,
2896 src_pos,
2897 src,
2898 length,
Roland Levillain0b671c02016-08-19 12:02:34 +01002899 intrinsic_slow_path,
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002900 temp1,
2901 optimizations.GetCountIsSourceLength());
2902
2903 // Validity checks: dest.
2904 CheckPosition(assembler,
2905 dest_pos,
2906 dest,
2907 length,
Roland Levillain0b671c02016-08-19 12:02:34 +01002908 intrinsic_slow_path,
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002909 temp1,
2910 optimizations.GetCountIsDestinationLength());
2911
2912 if (!optimizations.GetDoesNotNeedTypeCheck()) {
2913 // Check whether all elements of the source array are assignable to the component
2914 // type of the destination array. We do two checks: the classes are the same,
2915 // or the destination is Object[]. If none of these checks succeed, we go to the
2916 // slow path.
Roland Levillain0b671c02016-08-19 12:02:34 +01002917
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002918 if (!optimizations.GetSourceIsNonPrimitiveArray()) {
Roland Levillain0b671c02016-08-19 12:02:34 +01002919 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2920 // /* HeapReference<Class> */ temp1 = src->klass_
2921 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00002922 invoke, temp1_loc, src, class_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002923      // Bail out if the source is not a non-primitive array.
2924 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2925 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00002926 invoke, temp1_loc, temp1, component_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002927 __ testl(temp1, temp1);
2928 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
2929 // If heap poisoning is enabled, `temp1` has been unpoisoned
2930      // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
2931 } else {
2932 // /* HeapReference<Class> */ temp1 = src->klass_
2933 __ movl(temp1, Address(src, class_offset));
2934 __ MaybeUnpoisonHeapReference(temp1);
2935      // Bail out if the source is not a non-primitive array.
2936 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2937 __ movl(temp1, Address(temp1, component_offset));
2938 __ testl(temp1, temp1);
2939 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
2940 __ MaybeUnpoisonHeapReference(temp1);
2941 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002942 __ cmpw(Address(temp1, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0b671c02016-08-19 12:02:34 +01002943 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002944 }
2945
Roland Levillain0b671c02016-08-19 12:02:34 +01002946 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2947 if (length.Equals(Location::RegisterLocation(temp3))) {
2948 // When Baker read barriers are enabled, register `temp3`,
2949 // which in the present case contains the `length` parameter,
2950 // will be overwritten below. Make the `length` location
2951 // reference the original stack location; it will be moved
2952 // back to `temp3` later if necessary.
2953 DCHECK(length_arg.IsStackSlot());
2954 length = length_arg;
2955 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002956
Roland Levillain0b671c02016-08-19 12:02:34 +01002957 // /* HeapReference<Class> */ temp1 = dest->klass_
2958 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00002959 invoke, temp1_loc, dest, class_offset, /* needs_null_check */ false);
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002960
Roland Levillain0b671c02016-08-19 12:02:34 +01002961 if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
2962      // Bail out if the destination is not a non-primitive array.
2963 //
2964 // Register `temp1` is not trashed by the read barrier emitted
2965 // by GenerateFieldLoadWithBakerReadBarrier below, as that
2966 // method produces a call to a ReadBarrierMarkRegX entry point,
2967 // which saves all potentially live registers, including
2968      // temporaries such as `temp1`.
2969 // /* HeapReference<Class> */ temp2 = temp1->component_type_
2970 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00002971 invoke, temp2_loc, temp1, component_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002972 __ testl(temp2, temp2);
2973 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
2974 // If heap poisoning is enabled, `temp2` has been unpoisoned
2975      // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
2976 __ cmpw(Address(temp2, primitive_offset), Immediate(Primitive::kPrimNot));
2977 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
2978 }
2979
2980 // For the same reason given earlier, `temp1` is not trashed by the
2981 // read barrier emitted by GenerateFieldLoadWithBakerReadBarrier below.
2982 // /* HeapReference<Class> */ temp2 = src->klass_
2983 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00002984 invoke, temp2_loc, src, class_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002985 // Note: if heap poisoning is on, we are comparing two unpoisoned references here.
2986 __ cmpl(temp1, temp2);
2987
2988 if (optimizations.GetDestinationIsTypedObjectArray()) {
2989 NearLabel do_copy;
2990 __ j(kEqual, &do_copy);
2991 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2992 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00002993 invoke, temp1_loc, temp1, component_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002994 // We do not need to emit a read barrier for the following
2995 // heap reference load, as `temp1` is only used in a
2996 // comparison with null below, and this reference is not
2997 // kept afterwards.
2998 __ cmpl(Address(temp1, super_offset), Immediate(0));
2999 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
3000 __ Bind(&do_copy);
3001 } else {
3002 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
3003 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003004 } else {
Roland Levillain0b671c02016-08-19 12:02:34 +01003005 // Non read barrier code.
3006
3007 // /* HeapReference<Class> */ temp1 = dest->klass_
3008 __ movl(temp1, Address(dest, class_offset));
3009 if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
3010 __ MaybeUnpoisonHeapReference(temp1);
3011      // Bail out if the destination is not a non-primitive array.
3012 // /* HeapReference<Class> */ temp2 = temp1->component_type_
3013 __ movl(temp2, Address(temp1, component_offset));
3014 __ testl(temp2, temp2);
3015 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
3016 __ MaybeUnpoisonHeapReference(temp2);
3017 __ cmpw(Address(temp2, primitive_offset), Immediate(Primitive::kPrimNot));
3018 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
3019 // Re-poison the heap reference to make the compare instruction below
3020 // compare two poisoned references.
3021 __ PoisonHeapReference(temp1);
3022 }
3023
3024 // Note: if heap poisoning is on, we are comparing two poisoned references here.
3025 __ cmpl(temp1, Address(src, class_offset));
3026
3027 if (optimizations.GetDestinationIsTypedObjectArray()) {
3028 NearLabel do_copy;
3029 __ j(kEqual, &do_copy);
3030 __ MaybeUnpoisonHeapReference(temp1);
3031 // /* HeapReference<Class> */ temp1 = temp1->component_type_
3032 __ movl(temp1, Address(temp1, component_offset));
3033 __ MaybeUnpoisonHeapReference(temp1);
3034 __ cmpl(Address(temp1, super_offset), Immediate(0));
3035 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
3036 __ Bind(&do_copy);
3037 } else {
3038 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
3039 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003040 }
3041 } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
3042 DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
3043    // Bail out if the source is not a non-primitive array.
Roland Levillain0b671c02016-08-19 12:02:34 +01003044 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
3045 // /* HeapReference<Class> */ temp1 = src->klass_
3046 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00003047 invoke, temp1_loc, src, class_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01003048 // /* HeapReference<Class> */ temp1 = temp1->component_type_
3049 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00003050 invoke, temp1_loc, temp1, component_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01003051 __ testl(temp1, temp1);
3052 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
3053 // If heap poisoning is enabled, `temp1` has been unpoisoned
3054      // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
3055 } else {
3056 // /* HeapReference<Class> */ temp1 = src->klass_
3057 __ movl(temp1, Address(src, class_offset));
3058 __ MaybeUnpoisonHeapReference(temp1);
3059 // /* HeapReference<Class> */ temp1 = temp1->component_type_
3060 __ movl(temp1, Address(temp1, component_offset));
3061 __ testl(temp1, temp1);
3062 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
3063 __ MaybeUnpoisonHeapReference(temp1);
3064 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003065 __ cmpw(Address(temp1, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0b671c02016-08-19 12:02:34 +01003066 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003067 }
3068
Roland Levillain0b671c02016-08-19 12:02:34 +01003069 // Compute the base source address in `temp1`.
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003070 int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
3071 DCHECK_EQ(element_size, 4);
3072 uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();
3073 if (src_pos.IsConstant()) {
3074 int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
3075 __ leal(temp1, Address(src, element_size * constant + offset));
3076 } else {
3077 __ leal(temp1, Address(src, src_pos.AsRegister<Register>(), ScaleFactor::TIMES_4, offset));
3078 }
3079
Roland Levillain0b671c02016-08-19 12:02:34 +01003080 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
3081 // If it is needed (in the case of the fast-path loop), the base
3082 // destination address is computed later, as `temp2` is used for
3083 // intermediate computations.
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003084
Roland Levillain0b671c02016-08-19 12:02:34 +01003085 // Compute the end source address in `temp3`.
3086 if (length.IsConstant()) {
3087 int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
3088 __ leal(temp3, Address(temp1, element_size * constant));
3089 } else {
3090 if (length.IsStackSlot()) {
3091 // Location `length` is again pointing at a stack slot, as
3092        // register `temp3` (which contained the length parameter
3093        // earlier) has been overwritten; restore it now.
3094 DCHECK(length.Equals(length_arg));
3095 __ movl(temp3, Address(ESP, length.GetStackIndex()));
3096 length = Location::RegisterLocation(temp3);
3097 }
3098 __ leal(temp3, Address(temp1, length.AsRegister<Register>(), ScaleFactor::TIMES_4, 0));
3099 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003100
Roland Levillain0b671c02016-08-19 12:02:34 +01003101 // SystemArrayCopy implementation for Baker read barriers (see
3102 // also CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier):
3103 //
3104 // if (src_ptr != end_ptr) {
3105 // uint32_t rb_state = Lockword(src->monitor_).ReadBarrierState();
3106 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
3107 // bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
3108 // if (is_gray) {
3109 // // Slow-path copy.
3110 // for (size_t i = 0; i != length; ++i) {
3111 // dest_array[dest_pos + i] =
3112 // MaybePoison(ReadBarrier::Mark(MaybeUnpoison(src_array[src_pos + i])));
3113 // }
3114 // } else {
3115 // // Fast-path copy.
3116 // do {
3117 // *dest_ptr++ = *src_ptr++;
3118 // } while (src_ptr != end_ptr)
3119 // }
3120 // }
3121
3122 NearLabel loop, done;
3123
3124 // Don't enter copy loop if `length == 0`.
3125 __ cmpl(temp1, temp3);
3126 __ j(kEqual, &done);
3127
Vladimir Marko953437b2016-08-24 08:30:46 +00003128 // Given the numeric representation, it's enough to check the low bit of the rb_state.
3129 static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
3130 static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
3131 static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
3132 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
3133 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
3134 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
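    // `gray_byte_position` selects the byte of the lock word that holds the read barrier
    // state, and `test_value` is the gray bit within that byte, so a single `testb` suffices.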
3135
3136 // if (rb_state == ReadBarrier::gray_ptr_)
3137 // goto slow_path;
3138 // At this point, just do the "if" and make sure that flags are preserved until the branch.
3139 __ testb(Address(src, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain0b671c02016-08-19 12:02:34 +01003140
3141 // Load fence to prevent load-load reordering.
3142 // Note that this is a no-op, thanks to the x86 memory model.
3143 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
3144
3145 // Slow path used to copy array when `src` is gray.
3146 SlowPathCode* read_barrier_slow_path =
3147 new (GetAllocator()) ReadBarrierSystemArrayCopySlowPathX86(invoke);
3148 codegen_->AddSlowPath(read_barrier_slow_path);
3149
Vladimir Marko953437b2016-08-24 08:30:46 +00003150 // We have done the "if" of the gray bit check above, now branch based on the flags.
3151 __ j(kNotZero, read_barrier_slow_path->GetEntryLabel());
Roland Levillain0b671c02016-08-19 12:02:34 +01003152
3153 // Fast-path copy.
3154
3155 // Set the base destination address in `temp2`.
3156 if (dest_pos.IsConstant()) {
3157 int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
3158 __ leal(temp2, Address(dest, element_size * constant + offset));
3159 } else {
3160 __ leal(temp2, Address(dest, dest_pos.AsRegister<Register>(), ScaleFactor::TIMES_4, offset));
3161 }
3162
3163 // Iterate over the arrays and do a raw copy of the objects. We don't need to
3164 // poison/unpoison.
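    // Each 32-bit element is copied memory-to-memory with a pushl/popl pair, which
    // avoids the need for yet another free register.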
3165 __ Bind(&loop);
3166 __ pushl(Address(temp1, 0));
3167 __ cfi().AdjustCFAOffset(4);
3168 __ popl(Address(temp2, 0));
3169 __ cfi().AdjustCFAOffset(-4);
3170 __ addl(temp1, Immediate(element_size));
3171 __ addl(temp2, Immediate(element_size));
3172 __ cmpl(temp1, temp3);
3173 __ j(kNotEqual, &loop);
3174
3175 __ Bind(read_barrier_slow_path->GetExitLabel());
3176 __ Bind(&done);
3177 } else {
3178 // Non read barrier code.
3179
3180 // Compute the base destination address in `temp2`.
3181 if (dest_pos.IsConstant()) {
3182 int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
3183 __ leal(temp2, Address(dest, element_size * constant + offset));
3184 } else {
3185 __ leal(temp2, Address(dest, dest_pos.AsRegister<Register>(), ScaleFactor::TIMES_4, offset));
3186 }
3187
3188 // Compute the end source address in `temp3`.
3189 if (length.IsConstant()) {
3190 int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
3191 __ leal(temp3, Address(temp1, element_size * constant));
3192 } else {
3193 __ leal(temp3, Address(temp1, length.AsRegister<Register>(), ScaleFactor::TIMES_4, 0));
3194 }
3195
3196 // Iterate over the arrays and do a raw copy of the objects. We don't need to
3197 // poison/unpoison.
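    // As above, the pushl/popl pair copies each 32-bit element memory-to-memory
    // without requiring an extra scratch register.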
3198 NearLabel loop, done;
3199 __ cmpl(temp1, temp3);
3200 __ j(kEqual, &done);
3201 __ Bind(&loop);
3202 __ pushl(Address(temp1, 0));
3203 __ cfi().AdjustCFAOffset(4);
3204 __ popl(Address(temp2, 0));
3205 __ cfi().AdjustCFAOffset(-4);
3206 __ addl(temp1, Immediate(element_size));
3207 __ addl(temp2, Immediate(element_size));
3208 __ cmpl(temp1, temp3);
3209 __ j(kNotEqual, &loop);
3210 __ Bind(&done);
3211 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003212
3213 // We only need one card marking on the destination array.
3214 codegen_->MarkGCCard(temp1,
3215 temp2,
3216 dest,
3217 Register(kNoRegister),
3218 /* value_can_be_null */ false);
3219
Roland Levillain0b671c02016-08-19 12:02:34 +01003220 __ Bind(intrinsic_slow_path->GetExitLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003221}
3222
Aart Bik2f9fcc92016-03-01 15:16:54 -08003223UNIMPLEMENTED_INTRINSIC(X86, MathRoundDouble)
Aart Bik2f9fcc92016-03-01 15:16:54 -08003224UNIMPLEMENTED_INTRINSIC(X86, FloatIsInfinite)
3225UNIMPLEMENTED_INTRINSIC(X86, DoubleIsInfinite)
3226UNIMPLEMENTED_INTRINSIC(X86, IntegerHighestOneBit)
3227UNIMPLEMENTED_INTRINSIC(X86, LongHighestOneBit)
3228UNIMPLEMENTED_INTRINSIC(X86, IntegerLowestOneBit)
3229UNIMPLEMENTED_INTRINSIC(X86, LongLowestOneBit)
Mark Mendell09ed1a32015-03-25 08:30:06 -04003230
Aart Bik0e54c012016-03-04 12:08:31 -08003231// 1.8.
3232UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddInt)
3233UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddLong)
3234UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetInt)
3235UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetLong)
3236UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetObject)
Aart Bik0e54c012016-03-04 12:08:31 -08003237
Aart Bik2f9fcc92016-03-01 15:16:54 -08003238UNREACHABLE_INTRINSICS(X86)
Roland Levillain4d027112015-07-01 15:41:14 +01003239
3240#undef __
3241
Mark Mendell09ed1a32015-03-25 08:30:06 -04003242} // namespace x86
3243} // namespace art