/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_x86.h"

#include <limits>

#include "arch/x86/instruction_set_features_x86.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "code_generator_x86.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/reference.h"
#include "mirror/string.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-inl.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/constants_x86.h"

namespace art {

namespace x86 {

static constexpr int kDoubleNaNHigh = 0x7FF80000;
static constexpr int kDoubleNaNLow = 0x00000000;
static constexpr int64_t kDoubleNaN = INT64_C(0x7FF8000000000000);
static constexpr int32_t kFloatNaN = INT32_C(0x7FC00000);

IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
  : arena_(codegen->GetGraph()->GetArena()),
    codegen_(codegen) {
}


X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  return down_cast<X86Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorX86::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

bool IntrinsicLocationsBuilderX86::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorX86* codegen) {
  InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

using IntrinsicSlowPathX86 = IntrinsicSlowPath<InvokeDexCallingConventionVisitorX86>;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT

// Slow path implementing the SystemArrayCopy intrinsic copy loop with read barriers.
class ReadBarrierSystemArrayCopySlowPathX86 : public SlowPathCode {
 public:
  explicit ReadBarrierSystemArrayCopySlowPathX86(HInstruction* instruction)
      : SlowPathCode(instruction) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(instruction_->IsInvokeStaticOrDirect())
        << "Unexpected instruction in read barrier arraycopy slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kSystemArrayCopy);

    int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
    uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();

    Register src = locations->InAt(0).AsRegister<Register>();
    Location src_pos = locations->InAt(1);
    Register dest = locations->InAt(2).AsRegister<Register>();
    Location dest_pos = locations->InAt(3);
    Location length = locations->InAt(4);
    Location temp1_loc = locations->GetTemp(0);
    Register temp1 = temp1_loc.AsRegister<Register>();
    Register temp2 = locations->GetTemp(1).AsRegister<Register>();
    Register temp3 = locations->GetTemp(2).AsRegister<Register>();

    __ Bind(GetEntryLabel());
    // In this code path, registers `temp1`, `temp2`, and `temp3` are not
    // used for the base source address, the base destination address, and
    // the end source address, as they are in other SystemArrayCopy intrinsic
    // code paths. Instead they hold, respectively:
    // - the loop index (`i`);
    // - the source index (`src_index`) and the loaded (source)
    //   reference (`value`); and
    // - the destination index (`dest_index`).

    // i = 0
    __ xorl(temp1, temp1);
    NearLabel loop;
    __ Bind(&loop);
    // value = src_array[i + src_pos]
    if (src_pos.IsConstant()) {
      int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
      int32_t adjusted_offset = offset + constant * element_size;
      __ movl(temp2, Address(src, temp1, ScaleFactor::TIMES_4, adjusted_offset));
    } else {
      __ leal(temp2, Address(src_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0));
      __ movl(temp2, Address(src, temp2, ScaleFactor::TIMES_4, offset));
    }
    __ MaybeUnpoisonHeapReference(temp2);
    // TODO: Inline the mark bit check before calling the runtime?
    // value = ReadBarrier::Mark(value)
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    // (See ReadBarrierMarkSlowPathX86::EmitNativeCode for more
    // explanations.)
    DCHECK_NE(temp2, ESP);
    DCHECK(0 <= temp2 && temp2 < kNumberOfCpuRegisters) << temp2;
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86PointerSize>(temp2);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ MaybePoisonHeapReference(temp2);
    // dest_array[i + dest_pos] = value
    if (dest_pos.IsConstant()) {
      int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      int32_t adjusted_offset = offset + constant * element_size;
      __ movl(Address(dest, temp1, ScaleFactor::TIMES_4, adjusted_offset), temp2);
    } else {
      __ leal(temp3, Address(dest_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0));
      __ movl(Address(dest, temp3, ScaleFactor::TIMES_4, offset), temp2);
    }
    // ++i
    __ addl(temp1, Immediate(1));
    // if (i != length) goto loop
    x86_codegen->GenerateIntCompare(temp1_loc, length);
    __ j(kNotEqual, &loop);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierSystemArrayCopySlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierSystemArrayCopySlowPathX86);
};

#undef __

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
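    // movd only transfers the low 32 bits of an XMM register, so the double is
    // extracted in two steps: low word first, then the high word after psrlq
    // shifts it down by 32 bits.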
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    __ movsd(temp, input.AsFpuRegister<XmmRegister>());
    __ movd(output.AsRegisterPairLow<Register>(), temp);
    __ psrlq(temp, Immediate(32));
    __ movd(output.AsRegisterPairHigh<Register>(), temp);
  } else {
    __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
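    // Build the double from its two halves: punpckldq interleaves the low
    // dwords of temp1 (low word) and temp2 (high word), leaving the full
    // 64-bit value in the bottom of temp1.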
    XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
    __ movd(temp1, input.AsRegisterPairLow<Register>());
    __ movd(temp2, input.AsRegisterPairHigh<Register>());
    __ punpckldq(temp1, temp2);
    __ movsd(output.AsFpuRegister<XmmRegister>(), temp1);
  } else {
    __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, /* is64bit */ true);
}
void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, /* is64bit */ true);
}

void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, /* is64bit */ false);
}
void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, /* is64bit */ false);
}

void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type size,
                            X86Assembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  switch (size) {
    case Primitive::kPrimShort:
      // TODO: Can be done with an xchg of 8b registers. This is straight from Quick.
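      // bswapl reverses all four bytes, leaving the two meaningful bytes in
      // the upper half; the arithmetic shift moves them back down and
      // sign-extends, matching Java's short semantics.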
      __ bswapl(out);
      __ sarl(out, Immediate(16));
      break;
    case Primitive::kPrimInt:
      __ bswapl(out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitLongReverseBytes(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();

  X86Assembler* assembler = GetAssembler();
  // Assign the inputs to the outputs, mixing low/high.
  __ movl(output_lo, input_hi);
  __ movl(output_hi, input_lo);
  __ bswapl(output_lo);
  __ bswapl(output_hi);
}

void IntrinsicLocationsBuilderX86::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}


// TODO: Consider Quick's way of doing Double abs through integer operations, as the immediate we
//       need is 64b.

static void CreateFloatToFloat(ArenaAllocator* arena, HInvoke* invoke) {
  // TODO: Enable memory operations when the assembler supports them.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::SameAsFirstInput());
  HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
  DCHECK(static_or_direct != nullptr);
  if (static_or_direct->HasSpecialInput() &&
      invoke->InputAt(static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
    // We need addressability for the constant area.
    locations->SetInAt(1, Location::RequiresRegister());
    // We need a temporary to hold the constant.
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void MathAbsFP(HInvoke* invoke,
                      bool is64bit,
                      X86Assembler* assembler,
                      CodeGeneratorX86* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  Location output = locations->Out();

  DCHECK(output.IsFpuRegister());
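  // abs() only needs the IEEE-754 sign bit cleared, so it is implemented as a
  // bitwise AND with a 0x7FFF... mask, taken from the constant area when it is
  // addressable and otherwise built on the stack.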
  if (locations->GetInputCount() == 2 && locations->InAt(1).IsValid()) {
    HX86ComputeBaseMethodAddress* method_address =
        invoke->InputAt(1)->AsX86ComputeBaseMethodAddress();
    DCHECK(locations->InAt(1).IsRegister());
    // We also have a constant area pointer.
    Register constant_area = locations->InAt(1).AsRegister<Register>();
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    if (is64bit) {
      __ movsd(temp, codegen->LiteralInt64Address(
          INT64_C(0x7FFFFFFFFFFFFFFF), method_address, constant_area));
      __ andpd(output.AsFpuRegister<XmmRegister>(), temp);
    } else {
      __ movss(temp, codegen->LiteralInt32Address(
          INT32_C(0x7FFFFFFF), method_address, constant_area));
      __ andps(output.AsFpuRegister<XmmRegister>(), temp);
    }
  } else {
    // Create the right constant on an aligned stack.
    if (is64bit) {
      __ subl(ESP, Immediate(8));
      __ pushl(Immediate(0x7FFFFFFF));
      __ pushl(Immediate(0xFFFFFFFF));
      __ andpd(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    } else {
      __ subl(ESP, Immediate(12));
      __ pushl(Immediate(0x7FFFFFFF));
      __ andps(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    }
    __ addl(ESP, Immediate(16));
  }
}

void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke, /* is64bit */ true, GetAssembler(), codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke, /* is64bit */ false, GetAssembler(), codegen_);
}

static void CreateAbsIntLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RegisterLocation(EAX));
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RegisterLocation(EDX));
}

static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) {
  Location output = locations->Out();
  Register out = output.AsRegister<Register>();
  DCHECK_EQ(out, EAX);
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(temp, EDX);

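  // Branchless abs: with sign = x >> 31 (all ones for negative x, zero
  // otherwise), abs(x) == (x ^ sign) - sign. CDQ computes that sign mask.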
  // Sign extend EAX into EDX.
  __ cdq();

  // XOR EAX with sign.
  __ xorl(EAX, EDX);

  // Subtract out sign to correct.
  __ subl(EAX, EDX);

  // The result is in EAX.
}

static void CreateAbsLongLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

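  // Same branchless-abs identity as the 32-bit case, applied to the register
  // pair; sbbl propagates the borrow from the low word into the high word.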
  // Compute the sign into the temporary.
  __ movl(temp, input_hi);
  __ sarl(temp, Immediate(31));

  // Store the sign into the output.
  __ movl(output_lo, temp);
  __ movl(output_hi, temp);

  // XOR the input to the output.
  __ xorl(output_lo, input_lo);
  __ xorl(output_hi, input_hi);

  // Subtract the sign.
  __ subl(output_lo, temp);
  __ sbbl(output_hi, temp);
}

void IntrinsicLocationsBuilderX86::VisitMathAbsInt(HInvoke* invoke) {
  CreateAbsIntLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsLong(HInvoke* invoke) {
  CreateAbsLongLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsLong(invoke->GetLocations(), GetAssembler());
}

static void GenMinMaxFP(HInvoke* invoke,
                        bool is_min,
                        bool is_double,
                        X86Assembler* assembler,
                        CodeGeneratorX86* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  // (out := op1)
  // out <=? op2
  // if NaN jmp nan_label
  // if out is min jmp done
  // if op2 is min jmp op2_label
  // handle -0/+0
  // jmp done
  // nan_label:
  //   out := NaN
  // op2_label:
  //   out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (is_double) {
    __ ucomisd(out, op2);
  } else {
    __ ucomiss(out, op2);
  }

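  // ucomisd/ucomiss set the parity flag if and only if the comparison is
  // unordered, i.e. at least one operand is NaN.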
  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
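  // The operands compared equal here. If they are +0.0 and -0.0, ORing the
  // sign bits yields -0.0 (the min) and ANDing them yields +0.0 (the max);
  // for any other equal pair the bitwise op leaves the value unchanged.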
  if (is_min) {
    if (is_double) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (is_double) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling.
  __ Bind(&nan);
  // Do we have a constant area pointer?
  if (locations->GetInputCount() == 3 && locations->InAt(2).IsValid()) {
    HX86ComputeBaseMethodAddress* method_address =
        invoke->InputAt(2)->AsX86ComputeBaseMethodAddress();
    DCHECK(locations->InAt(2).IsRegister());
    Register constant_area = locations->InAt(2).AsRegister<Register>();
    if (is_double) {
      __ movsd(out, codegen->LiteralInt64Address(kDoubleNaN, method_address, constant_area));
    } else {
      __ movss(out, codegen->LiteralInt32Address(kFloatNaN, method_address, constant_area));
    }
  } else {
    if (is_double) {
      __ pushl(Immediate(kDoubleNaNHigh));
      __ pushl(Immediate(kDoubleNaNLow));
      __ movsd(out, Address(ESP, 0));
      __ addl(ESP, Immediate(8));
    } else {
      __ pushl(Immediate(kFloatNaN));
      __ movss(out, Address(ESP, 0));
      __ addl(ESP, Immediate(4));
    }
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (is_double) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  // The following is sub-optimal, but all we can do for now. It would be fine to also accept
  // the second input to be the output (we can simply swap inputs).
  locations->SetOut(Location::SameAsFirstInput());
  HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
  DCHECK(static_or_direct != nullptr);
  if (static_or_direct->HasSpecialInput() &&
      invoke->InputAt(static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke,
              /* is_min */ true,
              /* is_double */ true,
              GetAssembler(),
              codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke,
              /* is_min */ true,
              /* is_double */ false,
              GetAssembler(),
              codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke,
              /* is_min */ false,
              /* is_double */ true,
              GetAssembler(),
              codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke,
              /* is_min */ false,
              /* is_double */ false,
              GetAssembler(),
              codegen_);
}

static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long,
                      X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    // a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  if (is_long) {
    // Need to perform a subtract to get the sign right.
    // op1 is already in the same location as the output.
    Location output = locations->Out();
    Register output_lo = output.AsRegisterPairLow<Register>();
    Register output_hi = output.AsRegisterPairHigh<Register>();

    Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
    Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();

    // Spare register to compute the subtraction to set condition code.
    Register temp = locations->GetTemp(0).AsRegister<Register>();

    // Subtract off op2_low.
    __ movl(temp, output_lo);
    __ subl(temp, op2_lo);

    // Now use the same temp and the borrow to finish the subtraction of op2_hi.
    __ movl(temp, output_hi);
    __ sbbl(temp, op2_hi);

    // Now the condition code is correct.
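    // Note: cmovl is the assembler's 32-bit conditional move (cmov with an
    // explicit condition code), not "move if less"; both halves are moved
    // under the same condition, computed once above.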
    Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
    __ cmovl(cond, output_lo, op2_lo);
    __ cmovl(cond, output_hi, op2_hi);
  } else {
    Register out = locations->Out().AsRegister<Register>();
    Register op2 = op2_loc.AsRegister<Register>();

    // (out := op1)
    // out <=? op2
    // if out is min jmp done
    // out := op2
    // done:

    __ cmpl(out, op2);
    Condition cond = is_min ? Condition::kGreater : Condition::kLess;
    __ cmovl(cond, out, op2);
  }
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  // Register to use to perform a long subtract to set cc.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();

  GetAssembler()->sqrtsd(out, in);
}

static void InvokeOutOfLineIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke) {
  MoveArguments(invoke, codegen);

  DCHECK(invoke->IsInvokeStaticOrDirect());
  codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(),
                                      Location::RegisterLocation(EAX));
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());

  // Copy the result back to the expected output.
  Location out = invoke->GetLocations()->Out();
  if (out.IsValid()) {
    DCHECK(out.IsRegister());
    codegen->MoveFromReturnRegister(out, invoke->GetType());
  }
}

static void CreateSSE41FPToFPLocations(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       CodeGeneratorX86* codegen) {
  // Do we have instruction support?
  if (codegen->GetInstructionSetFeatures().HasSSE4_1()) {
    CreateFPToFPLocations(arena, invoke);
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

static void GenSSE41FPToFPIntrinsic(CodeGeneratorX86* codegen,
                                    HInvoke* invoke,
                                    X86Assembler* assembler,
                                    int round_mode) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen, invoke);
  } else {
    XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
    XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
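    // roundsd immediate encoding: 0 rounds to nearest even (rint), 1 rounds
    // toward -infinity (floor), 2 rounds toward +infinity (ceil).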
    __ roundsd(out, in, Immediate(round_mode));
  }
}

void IntrinsicLocationsBuilderX86::VisitMathCeil(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathCeil(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 2);
}

void IntrinsicLocationsBuilderX86::VisitMathFloor(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathFloor(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 1);
}

void IntrinsicLocationsBuilderX86::VisitMathRint(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathRint(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 0);
}

void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke) {
  // Do we have instruction support?
  if (codegen_->GetInstructionSetFeatures().HasSSE4_1()) {
    HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
    DCHECK(static_or_direct != nullptr);
    LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                              LocationSummary::kNoCall,
                                                              kIntrinsified);
    locations->SetInAt(0, Location::RequiresFpuRegister());
    if (static_or_direct->HasSpecialInput() &&
        invoke->InputAt(
            static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
      locations->SetInAt(1, Location::RequiresRegister());
    }
    locations->SetOut(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {  // TODO: can we reach this?
    InvokeOutOfLineIntrinsic(codegen_, invoke);
    return;
  }

  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister t1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
  XmmRegister t2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
  Register out = locations->Out().AsRegister<Register>();
  NearLabel skip_incr, done;
  X86Assembler* assembler = GetAssembler();

  // Since no direct x86 rounding instruction matches the required semantics,
  // this intrinsic is implemented as follows:
  //  result = floor(in);
  //  if (in - result >= 0.5f)
  //    result = result + 1.0f;
  __ movss(t2, in);
  __ roundss(t1, in, Immediate(1));
  __ subss(t2, t1);
  if (locations->GetInputCount() == 2 && locations->InAt(1).IsValid()) {
    // Direct constant area available.
    HX86ComputeBaseMethodAddress* method_address =
        invoke->InputAt(1)->AsX86ComputeBaseMethodAddress();
    Register constant_area = locations->InAt(1).AsRegister<Register>();
    __ comiss(t2, codegen_->LiteralInt32Address(bit_cast<int32_t, float>(0.5f),
                                                method_address,
                                                constant_area));
    __ j(kBelow, &skip_incr);
    __ addss(t1, codegen_->LiteralInt32Address(bit_cast<int32_t, float>(1.0f),
                                               method_address,
                                               constant_area));
    __ Bind(&skip_incr);
  } else {
    // No constant area: go through stack.
    __ pushl(Immediate(bit_cast<int32_t, float>(0.5f)));
    __ pushl(Immediate(bit_cast<int32_t, float>(1.0f)));
    __ comiss(t2, Address(ESP, 4));
    __ j(kBelow, &skip_incr);
    __ addss(t1, Address(ESP, 0));
    __ Bind(&skip_incr);
    __ addl(ESP, Immediate(8));
  }

  // Final conversion to an integer. Unfortunately this also does not have a
  // direct x86 instruction, since NaN should map to 0 and large positive
  // values need to be clipped to the extreme value.
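  // Note: kPrimIntMax (2^31 - 1) is not exactly representable as a float;
  // cvtsi2ss rounds it to 2^31, so any result at or above that bound takes
  // the clipped path below.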
  __ movl(out, Immediate(kPrimIntMax));
  __ cvtsi2ss(t2, out);
  __ comiss(t1, t2);
  __ j(kAboveEqual, &done);  // clipped to max (already in out), does not jump on unordered
  __ movl(out, Immediate(0));  // does not change flags
  __ j(kUnordered, &done);  // NaN mapped to 0 (just moved in out)
  __ cvttss2si(out, t1);
  __ Bind(&done);
}

static void CreateFPToFPCallLocations(ArenaAllocator* arena,
                                      HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCallOnMainOnly,
                                                           kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
}

static void GenFPToFPCall(HInvoke* invoke, CodeGeneratorX86* codegen, QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK(locations->WillCall());
  DCHECK(invoke->IsInvokeStaticOrDirect());
  X86Assembler* assembler = codegen->GetAssembler();

  // We need some place to pass the parameters.
  __ subl(ESP, Immediate(16));
  __ cfi().AdjustCFAOffset(16);

  // Pass the parameters at the bottom of the stack.
  __ movsd(Address(ESP, 0), XMM0);

  // If we have a second parameter, pass it next.
  if (invoke->GetNumberOfArguments() == 2) {
    __ movsd(Address(ESP, 8), XMM1);
  }

  // Now do the actual call.
  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());

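  // The native x86-32 ABI returns floating-point values in ST(0), so the
  // result has to travel through memory to reach an XMM register.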
  // Extract the return value from the FP stack.
  __ fstpl(Address(ESP, 0));
  __ movsd(XMM0, Address(ESP, 0));

  // And clean up the stack.
  __ addl(ESP, Immediate(16));
  __ cfi().AdjustCFAOffset(-16);
}

void IntrinsicLocationsBuilderX86::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderX86::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderX86::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderX86::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderX86::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderX86::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderX86::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderX86::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderX86::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderX86::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderX86::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderX86::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderX86::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderX86::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena,
                                        HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCallOnMainOnly,
                                                           kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
}

void IntrinsicLocationsBuilderX86::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAtan2(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderX86::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathHypot(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderX86::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathNextAfter(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // We need at least two of the positions or length to be an integer constant,
  // or else we won't have enough free registers.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  int num_constants =
      ((src_pos != nullptr) ? 1 : 0)
      + ((dest_pos != nullptr) ? 1 : 0)
      + ((length != nullptr) ? 1 : 0);

  if (num_constants < 2) {
    // Not enough free registers.
    return;
  }

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
  locations->AddTemp(Location::RegisterLocation(ESI));
  locations->AddTemp(Location::RegisterLocation(EDI));
  locations->AddTemp(Location::RegisterLocation(ECX));
}

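// Emit the range checks System.arraycopy needs for one array: pos >= 0,
// pos <= input.length, and input.length - pos >= length, branching to the
// shared slow path on any failure. Constant positions use cheaper forms of
// the same checks.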
static void CheckPosition(X86Assembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register temp,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        if (length.IsConstant()) {
          __ cmpl(Address(input, length_offset),
                  Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmpl(Address(input, length_offset), length.AsRegister<Register>());
        }
        __ j(kLess, slow_path->GetEntryLabel());
      }
    } else {
      // Check that length(input) >= pos.
      __ movl(temp, Address(input, length_offset));
      __ subl(temp, Immediate(pos_const));
      __ j(kLess, slow_path->GetEntryLabel());

      // Check that (length(input) - pos) >= length.
      if (length.IsConstant()) {
        __ cmpl(temp, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmpl(temp, length.AsRegister<Register>());
      }
      __ j(kLess, slow_path->GetEntryLabel());
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kNotEqual, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that pos <= length(input).
    __ cmpl(Address(input, length_offset), pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= length.
    __ movl(temp, Address(input, length_offset));
    __ subl(temp, pos_reg);
    if (length.IsConstant()) {
      __ cmpl(temp, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmpl(temp, length.AsRegister<Register>());
    }
    __ j(kLess, slow_path->GetEntryLabel());
  }
}

void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location srcPos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location destPos = locations->InAt(3);
  Location length = locations->InAt(4);

  // Temporaries that we need for MOVSW.
  Register src_base = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(src_base, ESI);
  Register dest_base = locations->GetTemp(1).AsRegister<Register>();
  DCHECK_EQ(dest_base, EDI);
  Register count = locations->GetTemp(2).AsRegister<Register>();
  DCHECK_EQ(count, ECX);

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ cmpl(src, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ testl(src, src);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ testl(dest, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant()) {
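    // testl sets SF from the sign bit; a cmpl of the register against itself
    // would always clear SF and OF and could never take the kLess branch.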
    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
1292 __ j(kLess, slow_path->GetEntryLabel());
1293 }
1294
1295 // We need the count in ECX.
1296 if (length.IsConstant()) {
1297 __ movl(count, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
1298 } else {
1299 __ movl(count, length.AsRegister<Register>());
1300 }
1301
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01001302 // Validity checks: source. Use src_base as a temporary register.
1303 CheckPosition(assembler, srcPos, src, Location::RegisterLocation(count), slow_path, src_base);
Mark Mendell6bc53a92015-07-01 14:26:52 -04001304
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01001305 // Validity checks: dest. Use src_base as a temporary register.
1306 CheckPosition(assembler, destPos, dest, Location::RegisterLocation(count), slow_path, src_base);
Mark Mendell6bc53a92015-07-01 14:26:52 -04001307
1308 // Okay, everything checks out. Finally time to do the copy.
1309 // Check assumption that sizeof(Char) is 2 (used in scaling below).
1310 const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
1311 DCHECK_EQ(char_size, 2u);
1312
1313 const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();
1314
1315 if (srcPos.IsConstant()) {
1316 int32_t srcPos_const = srcPos.GetConstant()->AsIntConstant()->GetValue();
1317 __ leal(src_base, Address(src, char_size * srcPos_const + data_offset));
1318 } else {
1319 __ leal(src_base, Address(src, srcPos.AsRegister<Register>(),
1320 ScaleFactor::TIMES_2, data_offset));
1321 }
1322 if (destPos.IsConstant()) {
1323 int32_t destPos_const = destPos.GetConstant()->AsIntConstant()->GetValue();
1324
1325 __ leal(dest_base, Address(dest, char_size * destPos_const + data_offset));
1326 } else {
1327 __ leal(dest_base, Address(dest, destPos.AsRegister<Register>(),
1328 ScaleFactor::TIMES_2, data_offset));
1329 }
1330
1331 // Do the move.
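       // REP MOVSW moves ECX 16-bit words from [ESI] to [EDI], advancing both pointers;
       // the calling convention guarantees the direction flag is already clear.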
1332 __ rep_movsw();
1333
1334 __ Bind(slow_path->GetExitLabel());
1335}
1336
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001337void IntrinsicLocationsBuilderX86::VisitStringCompareTo(HInvoke* invoke) {
1338 // The inputs plus one temp.
1339 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001340 LocationSummary::kCallOnMainAndSlowPath,
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001341 kIntrinsified);
1342 InvokeRuntimeCallingConvention calling_convention;
1343 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1344 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1345 locations->SetOut(Location::RegisterLocation(EAX));
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001346}
1347
1348void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
1349 X86Assembler* assembler = GetAssembler();
1350 LocationSummary* locations = invoke->GetLocations();
1351
Nicolas Geoffray512e04d2015-03-27 17:21:24 +00001352 // Note that the null check must have been done earlier.
Calin Juravle641547a2015-04-21 22:08:51 +01001353 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001354
1355 Register argument = locations->InAt(1).AsRegister<Register>();
1356 __ testl(argument, argument);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001357 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001358 codegen_->AddSlowPath(slow_path);
1359 __ j(kEqual, slow_path->GetEntryLabel());
1360
Serban Constantinescuba45db02016-07-12 22:53:02 +01001361 codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001362 __ Bind(slow_path->GetExitLabel());
1363}
1364
Agi Csakid7138c82015-08-13 17:46:44 -07001365void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) {
1366 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1367 LocationSummary::kNoCall,
1368 kIntrinsified);
1369 locations->SetInAt(0, Location::RequiresRegister());
1370 locations->SetInAt(1, Location::RequiresRegister());
1371
1372 // Request temporary registers, ECX and EDI needed for repe_cmpsl instruction.
1373 locations->AddTemp(Location::RegisterLocation(ECX));
1374 locations->AddTemp(Location::RegisterLocation(EDI));
1375
 1376  // Set the output to ESI; it is needed for the repe_cmpsl instruction anyway.
1377 locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap);
1378}
1379
1380void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) {
1381 X86Assembler* assembler = GetAssembler();
1382 LocationSummary* locations = invoke->GetLocations();
1383
1384 Register str = locations->InAt(0).AsRegister<Register>();
1385 Register arg = locations->InAt(1).AsRegister<Register>();
1386 Register ecx = locations->GetTemp(0).AsRegister<Register>();
1387 Register edi = locations->GetTemp(1).AsRegister<Register>();
1388 Register esi = locations->Out().AsRegister<Register>();
1389
Mark Mendell0c9497d2015-08-21 09:30:05 -04001390 NearLabel end, return_true, return_false;
Agi Csakid7138c82015-08-13 17:46:44 -07001391
1392 // Get offsets of count, value, and class fields within a string object.
1393 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
1394 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1395 const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();
1396
1397 // Note that the null check must have been done earlier.
1398 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1399
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001400 StringEqualsOptimizations optimizations(invoke);
1401 if (!optimizations.GetArgumentNotNull()) {
1402 // Check if input is null, return false if it is.
1403 __ testl(arg, arg);
1404 __ j(kEqual, &return_false);
1405 }
Agi Csakid7138c82015-08-13 17:46:44 -07001406
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001407 if (!optimizations.GetArgumentIsString()) {
Vladimir Marko53b52002016-05-24 19:30:45 +01001408 // Instanceof check for the argument by comparing class fields.
1409 // All string objects must have the same type since String cannot be subclassed.
1410 // Receiver must be a string object, so its class field is equal to all strings' class fields.
1411 // If the argument is a string object, its class field must be equal to receiver's class field.
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001412 __ movl(ecx, Address(str, class_offset));
1413 __ cmpl(ecx, Address(arg, class_offset));
1414 __ j(kNotEqual, &return_false);
1415 }
Agi Csakid7138c82015-08-13 17:46:44 -07001416
1417 // Reference equality check, return true if same reference.
1418 __ cmpl(str, arg);
1419 __ j(kEqual, &return_true);
1420
jessicahandojo4877b792016-09-08 19:49:13 -07001421 // Load length and compression flag of receiver string.
Agi Csakid7138c82015-08-13 17:46:44 -07001422 __ movl(ecx, Address(str, count_offset));
jessicahandojo4877b792016-09-08 19:49:13 -07001423 // Check if lengths and compression flags are equal, return false if they're not.
1424 // Two identical strings will always have same compression style since
1425 // compression style is decided on alloc.
Agi Csakid7138c82015-08-13 17:46:44 -07001426 __ cmpl(ecx, Address(arg, count_offset));
1427 __ j(kNotEqual, &return_false);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001428 // Return true if strings are empty. Even with string compression `count == 0` means empty.
1429 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
1430 "Expecting 0=compressed, 1=uncompressed");
1431 __ jecxz(&return_true);
Agi Csakid7138c82015-08-13 17:46:44 -07001432
jessicahandojo4877b792016-09-08 19:49:13 -07001433 if (mirror::kUseStringCompression) {
1434 NearLabel string_uncompressed;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001435 // Extract length and differentiate between both compressed or both uncompressed.
1436 // Different compression style is cut above.
1437 __ shrl(ecx, Immediate(1));
1438 __ j(kCarrySet, &string_uncompressed);
jessicahandojo4877b792016-09-08 19:49:13 -07001439 // Divide string length by 2, rounding up, and continue as if uncompressed.
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001440 __ addl(ecx, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07001441 __ shrl(ecx, Immediate(1));
1442 __ Bind(&string_uncompressed);
1443 }
Agi Csakid7138c82015-08-13 17:46:44 -07001444 // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction.
1445 __ leal(esi, Address(str, value_offset));
1446 __ leal(edi, Address(arg, value_offset));
1447
jessicahandojo4877b792016-09-08 19:49:13 -07001448 // Divide string length by 2 to compare characters 2 at a time and adjust for lengths not
1449 // divisible by 2.
Agi Csakid7138c82015-08-13 17:46:44 -07001450 __ addl(ecx, Immediate(1));
1451 __ shrl(ecx, Immediate(1));
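       // For example, a length of 5 becomes (5 + 1) >> 1 = 3 dword compares covering 6 chars;
       // per the static_assert below, the padding char after an odd length is zeroed on both
       // sides, so the extra compare cannot fake a mismatch.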
1452
jessicahandojo4877b792016-09-08 19:49:13 -07001453 // Assertions that must hold in order to compare strings 2 characters (uncompressed)
1454 // or 4 characters (compressed) at a time.
Agi Csakid7138c82015-08-13 17:46:44 -07001455 DCHECK_ALIGNED(value_offset, 4);
1456 static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");
1457
1458 // Loop to compare strings two characters at a time starting at the beginning of the string.
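       // REPE CMPSL compares one 4-byte unit per iteration, roughly:
       //   while (ecx != 0) { ZF = (*esi++ == *edi++); ecx--; if (!ZF) break; }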
1459 __ repe_cmpsl();
1460 // If strings are not equal, zero flag will be cleared.
1461 __ j(kNotEqual, &return_false);
1462
1463 // Return true and exit the function.
1464 // If loop does not result in returning false, we return true.
1465 __ Bind(&return_true);
1466 __ movl(esi, Immediate(1));
1467 __ jmp(&end);
1468
1469 // Return false and exit the function.
1470 __ Bind(&return_false);
1471 __ xorl(esi, esi);
1472 __ Bind(&end);
1473}
1474
Andreas Gampe21030dd2015-05-07 14:46:15 -07001475static void CreateStringIndexOfLocations(HInvoke* invoke,
1476 ArenaAllocator* allocator,
1477 bool start_at_zero) {
1478 LocationSummary* locations = new (allocator) LocationSummary(invoke,
1479 LocationSummary::kCallOnSlowPath,
1480 kIntrinsified);
 1481  // The data needs to be in EDI for scasw, so request that the string be placed there anyway.
1482 locations->SetInAt(0, Location::RegisterLocation(EDI));
1483 // If we look for a constant char, we'll still have to copy it into EAX. So just request the
 1484  // allocator to do that, anyway. We can still do the constant check by checking the parameter
1485 // of the instruction explicitly.
1486 // Note: This works as we don't clobber EAX anywhere.
1487 locations->SetInAt(1, Location::RegisterLocation(EAX));
1488 if (!start_at_zero) {
1489 locations->SetInAt(2, Location::RequiresRegister()); // The starting index.
1490 }
 1491  // As we clobber EDI during execution anyway, also use it as the output.
1492 locations->SetOut(Location::SameAsFirstInput());
1493
1494 // repne scasw uses ECX as the counter.
1495 locations->AddTemp(Location::RegisterLocation(ECX));
1496 // Need another temporary to be able to compute the result.
1497 locations->AddTemp(Location::RequiresRegister());
jessicahandojo4877b792016-09-08 19:49:13 -07001498 if (mirror::kUseStringCompression) {
1499 // Need another temporary to be able to save unflagged string length.
1500 locations->AddTemp(Location::RequiresRegister());
1501 }
Andreas Gampe21030dd2015-05-07 14:46:15 -07001502}
1503
1504static void GenerateStringIndexOf(HInvoke* invoke,
1505 X86Assembler* assembler,
1506 CodeGeneratorX86* codegen,
1507 ArenaAllocator* allocator,
1508 bool start_at_zero) {
1509 LocationSummary* locations = invoke->GetLocations();
1510
1511 // Note that the null check must have been done earlier.
1512 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1513
1514 Register string_obj = locations->InAt(0).AsRegister<Register>();
1515 Register search_value = locations->InAt(1).AsRegister<Register>();
1516 Register counter = locations->GetTemp(0).AsRegister<Register>();
1517 Register string_length = locations->GetTemp(1).AsRegister<Register>();
1518 Register out = locations->Out().AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07001519 // Only used when string compression feature is on.
1520 Register string_length_flagged;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001521
1522 // Check our assumptions for registers.
1523 DCHECK_EQ(string_obj, EDI);
1524 DCHECK_EQ(search_value, EAX);
1525 DCHECK_EQ(counter, ECX);
1526 DCHECK_EQ(out, EDI);
1527
1528 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001529 // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
Andreas Gampe85b62f22015-09-09 13:15:38 -07001530 SlowPathCode* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001531 HInstruction* code_point = invoke->InputAt(1);
1532 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001533 if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) >
Andreas Gampe21030dd2015-05-07 14:46:15 -07001534 std::numeric_limits<uint16_t>::max()) {
1535 // Always needs the slow-path. We could directly dispatch to it, but this case should be
1536 // rare, so for simplicity just put the full slow-path down and branch unconditionally.
1537 slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
1538 codegen->AddSlowPath(slow_path);
1539 __ jmp(slow_path->GetEntryLabel());
1540 __ Bind(slow_path->GetExitLabel());
1541 return;
1542 }
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001543 } else if (code_point->GetType() != Primitive::kPrimChar) {
Andreas Gampe21030dd2015-05-07 14:46:15 -07001544 __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
1545 slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
1546 codegen->AddSlowPath(slow_path);
1547 __ j(kAbove, slow_path->GetEntryLabel());
1548 }
1549
1550 // From here down, we know that we are looking for a char that fits in 16 bits.
1551 // Location of reference to data array within the String object.
1552 int32_t value_offset = mirror::String::ValueOffset().Int32Value();
1553 // Location of count within the String object.
1554 int32_t count_offset = mirror::String::CountOffset().Int32Value();
1555
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001556 // Load the count field of the string containing the length and compression flag.
Andreas Gampe21030dd2015-05-07 14:46:15 -07001557 __ movl(string_length, Address(string_obj, count_offset));
1558
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001559 // Do a zero-length check. Even with string compression `count == 0` means empty.
1560 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
1561 "Expecting 0=compressed, 1=uncompressed");
Andreas Gampe21030dd2015-05-07 14:46:15 -07001562 // TODO: Support jecxz.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001563 NearLabel not_found_label;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001564 __ testl(string_length, string_length);
1565 __ j(kEqual, &not_found_label);
1566
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001567 if (mirror::kUseStringCompression) {
1568 string_length_flagged = locations->GetTemp(2).AsRegister<Register>();
1569 __ movl(string_length_flagged, string_length);
1570 // Extract the length and shift out the least significant bit used as compression flag.
1571 __ shrl(string_length, Immediate(1));
1572 }
1573
Andreas Gampe21030dd2015-05-07 14:46:15 -07001574 if (start_at_zero) {
1575 // Number of chars to scan is the same as the string length.
1576 __ movl(counter, string_length);
1577
1578 // Move to the start of the string.
1579 __ addl(string_obj, Immediate(value_offset));
1580 } else {
1581 Register start_index = locations->InAt(2).AsRegister<Register>();
1582
1583 // Do a start_index check.
1584 __ cmpl(start_index, string_length);
1585 __ j(kGreaterEqual, &not_found_label);
1586
 1587    // Clamp the start index to be non-negative: counter = max(start_index, 0).
1588 __ xorl(counter, counter);
1589 __ cmpl(start_index, Immediate(0));
1590 __ cmovl(kGreater, counter, start_index);
1591
jessicahandojo4877b792016-09-08 19:49:13 -07001592 if (mirror::kUseStringCompression) {
1593 NearLabel modify_counter, offset_uncompressed_label;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001594 __ testl(string_length_flagged, Immediate(1));
1595 __ j(kNotZero, &offset_uncompressed_label);
jessicahandojo4877b792016-09-08 19:49:13 -07001596 // Move to the start of the string: string_obj + value_offset + start_index.
1597 __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_1, value_offset));
1598 __ jmp(&modify_counter);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001599
jessicahandojo4877b792016-09-08 19:49:13 -07001600 // Move to the start of the string: string_obj + value_offset + 2 * start_index.
1601 __ Bind(&offset_uncompressed_label);
1602 __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));
1603
1604 // Now update ecx (the repne scasw work counter). We have string.length - start_index left to
1605 // compare.
1606 __ Bind(&modify_counter);
1607 } else {
1608 __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));
1609 }
Andreas Gampe21030dd2015-05-07 14:46:15 -07001610 __ negl(counter);
1611 __ leal(counter, Address(string_length, counter, ScaleFactor::TIMES_1, 0));
1612 }
1613
jessicahandojo4877b792016-09-08 19:49:13 -07001614 if (mirror::kUseStringCompression) {
1615 NearLabel uncompressed_string_comparison;
1616 NearLabel comparison_done;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001617 __ testl(string_length_flagged, Immediate(1));
1618 __ j(kNotZero, &uncompressed_string_comparison);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001619
jessicahandojo4877b792016-09-08 19:49:13 -07001620 // Check if EAX (search_value) is ASCII.
1621 __ cmpl(search_value, Immediate(127));
1622 __ j(kGreater, &not_found_label);
1623 // Comparing byte-per-byte.
1624 __ repne_scasb();
1625 __ jmp(&comparison_done);
1626
1627 // Everything is set up for repne scasw:
1628 // * Comparison address in EDI.
1629 // * Counter in ECX.
1630 __ Bind(&uncompressed_string_comparison);
1631 __ repne_scasw();
1632 __ Bind(&comparison_done);
1633 } else {
1634 __ repne_scasw();
1635 }
Andreas Gampe21030dd2015-05-07 14:46:15 -07001636 // Did we find a match?
1637 __ j(kNotEqual, &not_found_label);
1638
1639 // Yes, we matched. Compute the index of the result.
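       // ECX still holds the number of chars left after the hit, so string_length - ECX is
       // the one-based position of the match (any start offset was folded into ECX above);
       // subtracting 1 yields the index.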
1640 __ subl(string_length, counter);
1641 __ leal(out, Address(string_length, -1));
1642
Mark Mendell0c9497d2015-08-21 09:30:05 -04001643 NearLabel done;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001644 __ jmp(&done);
1645
1646 // Failed to match; return -1.
1647 __ Bind(&not_found_label);
1648 __ movl(out, Immediate(-1));
1649
1650 // And join up at the end.
1651 __ Bind(&done);
1652 if (slow_path != nullptr) {
1653 __ Bind(slow_path->GetExitLabel());
1654 }
1655}
1656
1657void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001658 CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ true);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001659}
1660
1661void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001662 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001663}
1664
1665void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001666 CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ false);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001667}
1668
1669void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001670 GenerateStringIndexOf(
1671 invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001672}
1673
Jeff Hao848f70a2014-01-15 13:49:50 -08001674void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1675 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001676 LocationSummary::kCallOnMainAndSlowPath,
Jeff Hao848f70a2014-01-15 13:49:50 -08001677 kIntrinsified);
1678 InvokeRuntimeCallingConvention calling_convention;
1679 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1680 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1681 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1682 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1683 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001684}
1685
1686void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1687 X86Assembler* assembler = GetAssembler();
1688 LocationSummary* locations = invoke->GetLocations();
1689
1690 Register byte_array = locations->InAt(0).AsRegister<Register>();
1691 __ testl(byte_array, byte_array);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001692 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001693 codegen_->AddSlowPath(slow_path);
1694 __ j(kEqual, slow_path->GetEntryLabel());
1695
Serban Constantinescuba45db02016-07-12 22:53:02 +01001696 codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001697 CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001698 __ Bind(slow_path->GetExitLabel());
1699}
1700
1701void IntrinsicLocationsBuilderX86::VisitStringNewStringFromChars(HInvoke* invoke) {
1702 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01001703 LocationSummary::kCallOnMainOnly,
Jeff Hao848f70a2014-01-15 13:49:50 -08001704 kIntrinsified);
1705 InvokeRuntimeCallingConvention calling_convention;
1706 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1707 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1708 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1709 locations->SetOut(Location::RegisterLocation(EAX));
1710}
1711
1712void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) {
Roland Levillaincc3839c2016-02-29 16:23:48 +00001713 // No need to emit code checking whether `locations->InAt(2)` is a null
1714 // pointer, as callers of the native method
1715 //
1716 // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
1717 //
1718 // all include a null check on `data` before calling that method.
Serban Constantinescuba45db02016-07-12 22:53:02 +01001719 codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001720 CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001721}
1722
1723void IntrinsicLocationsBuilderX86::VisitStringNewStringFromString(HInvoke* invoke) {
1724 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001725 LocationSummary::kCallOnMainAndSlowPath,
Jeff Hao848f70a2014-01-15 13:49:50 -08001726 kIntrinsified);
1727 InvokeRuntimeCallingConvention calling_convention;
1728 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1729 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001730}
1731
1732void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke) {
1733 X86Assembler* assembler = GetAssembler();
1734 LocationSummary* locations = invoke->GetLocations();
1735
1736 Register string_to_copy = locations->InAt(0).AsRegister<Register>();
1737 __ testl(string_to_copy, string_to_copy);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001738 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001739 codegen_->AddSlowPath(slow_path);
1740 __ j(kEqual, slow_path->GetEntryLabel());
1741
Serban Constantinescuba45db02016-07-12 22:53:02 +01001742 codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001743 CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001744 __ Bind(slow_path->GetExitLabel());
1745}
1746
Mark Mendell8f8926a2015-08-17 11:39:06 -04001747void IntrinsicLocationsBuilderX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1748 // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
1749 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1750 LocationSummary::kNoCall,
1751 kIntrinsified);
1752 locations->SetInAt(0, Location::RequiresRegister());
1753 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
1754 // Place srcEnd in ECX to save a move below.
1755 locations->SetInAt(2, Location::RegisterLocation(ECX));
1756 locations->SetInAt(3, Location::RequiresRegister());
1757 locations->SetInAt(4, Location::RequiresRegister());
1758
1759 // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
1760 // We don't have enough registers to also grab ECX, so handle below.
1761 locations->AddTemp(Location::RegisterLocation(ESI));
1762 locations->AddTemp(Location::RegisterLocation(EDI));
1763}
1764
1765void IntrinsicCodeGeneratorX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1766 X86Assembler* assembler = GetAssembler();
1767 LocationSummary* locations = invoke->GetLocations();
1768
1769 size_t char_component_size = Primitive::ComponentSize(Primitive::kPrimChar);
1770 // Location of data in char array buffer.
1771 const uint32_t data_offset = mirror::Array::DataOffset(char_component_size).Uint32Value();
1772 // Location of char array data in string.
1773 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1774
1775 // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
1776 Register obj = locations->InAt(0).AsRegister<Register>();
1777 Location srcBegin = locations->InAt(1);
1778 int srcBegin_value =
1779 srcBegin.IsConstant() ? srcBegin.GetConstant()->AsIntConstant()->GetValue() : 0;
1780 Register srcEnd = locations->InAt(2).AsRegister<Register>();
1781 Register dst = locations->InAt(3).AsRegister<Register>();
1782 Register dstBegin = locations->InAt(4).AsRegister<Register>();
1783
1784 // Check assumption that sizeof(Char) is 2 (used in scaling below).
1785 const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
1786 DCHECK_EQ(char_size, 2u);
1787
Mark Mendell8f8926a2015-08-17 11:39:06 -04001788 // Compute the number of chars (words) to move.
jessicahandojo4877b792016-09-08 19:49:13 -07001789 // Save ECX, since we don't know if it will be used later.
Mark Mendell8f8926a2015-08-17 11:39:06 -04001790 __ pushl(ECX);
1791 int stack_adjust = kX86WordSize;
1792 __ cfi().AdjustCFAOffset(stack_adjust);
1793 DCHECK_EQ(srcEnd, ECX);
1794 if (srcBegin.IsConstant()) {
jessicahandojo4877b792016-09-08 19:49:13 -07001795 __ subl(ECX, Immediate(srcBegin_value));
Mark Mendell8f8926a2015-08-17 11:39:06 -04001796 } else {
1797 DCHECK(srcBegin.IsRegister());
1798 __ subl(ECX, srcBegin.AsRegister<Register>());
1799 }
1800
jessicahandojo4877b792016-09-08 19:49:13 -07001801 NearLabel done;
1802 if (mirror::kUseStringCompression) {
 1803    // Location of the count field in the string.
1804 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
1805 const size_t c_char_size = Primitive::ComponentSize(Primitive::kPrimByte);
1806 DCHECK_EQ(c_char_size, 1u);
1807 __ pushl(EAX);
1808 __ cfi().AdjustCFAOffset(stack_adjust);
1809
1810 NearLabel copy_loop, copy_uncompressed;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001811 __ testl(Address(obj, count_offset), Immediate(1));
1812 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
1813 "Expecting 0=compressed, 1=uncompressed");
1814 __ j(kNotZero, &copy_uncompressed);
jessicahandojo4877b792016-09-08 19:49:13 -07001815 // Compute the address of the source string by adding the number of chars from
1816 // the source beginning to the value offset of a string.
1817 __ leal(ESI, CodeGeneratorX86::ArrayAddress(obj, srcBegin, TIMES_1, value_offset));
1818
1819 // Start the loop to copy String's value to Array of Char.
1820 __ leal(EDI, Address(dst, dstBegin, ScaleFactor::TIMES_2, data_offset));
1821 __ Bind(&copy_loop);
1822 __ jecxz(&done);
1823 // Use EAX temporary (convert byte from ESI to word).
1824 // TODO: Use LODSB/STOSW (not supported by X86Assembler) with AH initialized to 0.
1825 __ movzxb(EAX, Address(ESI, 0));
1826 __ movw(Address(EDI, 0), EAX);
1827 __ leal(EDI, Address(EDI, char_size));
1828 __ leal(ESI, Address(ESI, c_char_size));
1829 // TODO: Add support for LOOP to X86Assembler.
1830 __ subl(ECX, Immediate(1));
1831 __ jmp(&copy_loop);
1832 __ Bind(&copy_uncompressed);
1833 }
1834
1835 // Do the copy for uncompressed string.
1836 // Compute the address of the destination buffer.
1837 __ leal(EDI, Address(dst, dstBegin, ScaleFactor::TIMES_2, data_offset));
1838 __ leal(ESI, CodeGeneratorX86::ArrayAddress(obj, srcBegin, TIMES_2, value_offset));
Mark Mendell8f8926a2015-08-17 11:39:06 -04001839 __ rep_movsw();
1840
jessicahandojo4877b792016-09-08 19:49:13 -07001841 __ Bind(&done);
1842 if (mirror::kUseStringCompression) {
1843 // Restore EAX.
1844 __ popl(EAX);
1845 __ cfi().AdjustCFAOffset(-stack_adjust);
1846 }
1847 // Restore ECX.
Mark Mendell8f8926a2015-08-17 11:39:06 -04001848 __ popl(ECX);
1849 __ cfi().AdjustCFAOffset(-stack_adjust);
1850}
1851
Mark Mendell09ed1a32015-03-25 08:30:06 -04001852static void GenPeek(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
1853 Register address = locations->InAt(0).AsRegisterPairLow<Register>();
1854 Location out_loc = locations->Out();
1855 // x86 allows unaligned access. We do not have to check the input or use specific instructions
1856 // to avoid a SIGBUS.
1857 switch (size) {
1858 case Primitive::kPrimByte:
1859 __ movsxb(out_loc.AsRegister<Register>(), Address(address, 0));
1860 break;
1861 case Primitive::kPrimShort:
1862 __ movsxw(out_loc.AsRegister<Register>(), Address(address, 0));
1863 break;
1864 case Primitive::kPrimInt:
1865 __ movl(out_loc.AsRegister<Register>(), Address(address, 0));
1866 break;
1867 case Primitive::kPrimLong:
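       // A 64-bit peek is simply two 32-bit loads (low word first), so it is not atomic.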
1868 __ movl(out_loc.AsRegisterPairLow<Register>(), Address(address, 0));
1869 __ movl(out_loc.AsRegisterPairHigh<Register>(), Address(address, 4));
1870 break;
1871 default:
1872 LOG(FATAL) << "Type not recognized for peek: " << size;
1873 UNREACHABLE();
1874 }
1875}
1876
1877void IntrinsicLocationsBuilderX86::VisitMemoryPeekByte(HInvoke* invoke) {
1878 CreateLongToIntLocations(arena_, invoke);
1879}
1880
1881void IntrinsicCodeGeneratorX86::VisitMemoryPeekByte(HInvoke* invoke) {
1882 GenPeek(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
1883}
1884
1885void IntrinsicLocationsBuilderX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
1886 CreateLongToIntLocations(arena_, invoke);
1887}
1888
1889void IntrinsicCodeGeneratorX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
1890 GenPeek(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
1891}
1892
1893void IntrinsicLocationsBuilderX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
1894 CreateLongToLongLocations(arena_, invoke);
1895}
1896
1897void IntrinsicCodeGeneratorX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
1898 GenPeek(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
1899}
1900
1901void IntrinsicLocationsBuilderX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
1902 CreateLongToIntLocations(arena_, invoke);
1903}
1904
1905void IntrinsicCodeGeneratorX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
1906 GenPeek(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
1907}
1908
1909static void CreateLongIntToVoidLocations(ArenaAllocator* arena, Primitive::Type size,
1910 HInvoke* invoke) {
1911 LocationSummary* locations = new (arena) LocationSummary(invoke,
1912 LocationSummary::kNoCall,
1913 kIntrinsified);
1914 locations->SetInAt(0, Location::RequiresRegister());
Roland Levillain4c0eb422015-04-24 16:43:49 +01001915 HInstruction* value = invoke->InputAt(1);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001916 if (size == Primitive::kPrimByte) {
1917 locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value));
1918 } else {
1919 locations->SetInAt(1, Location::RegisterOrConstant(value));
1920 }
1921}
1922
1923static void GenPoke(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
1924 Register address = locations->InAt(0).AsRegisterPairLow<Register>();
1925 Location value_loc = locations->InAt(1);
1926 // x86 allows unaligned access. We do not have to check the input or use specific instructions
1927 // to avoid a SIGBUS.
1928 switch (size) {
1929 case Primitive::kPrimByte:
1930 if (value_loc.IsConstant()) {
1931 __ movb(Address(address, 0),
1932 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1933 } else {
1934 __ movb(Address(address, 0), value_loc.AsRegister<ByteRegister>());
1935 }
1936 break;
1937 case Primitive::kPrimShort:
1938 if (value_loc.IsConstant()) {
1939 __ movw(Address(address, 0),
1940 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1941 } else {
1942 __ movw(Address(address, 0), value_loc.AsRegister<Register>());
1943 }
1944 break;
1945 case Primitive::kPrimInt:
1946 if (value_loc.IsConstant()) {
1947 __ movl(Address(address, 0),
1948 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1949 } else {
1950 __ movl(Address(address, 0), value_loc.AsRegister<Register>());
1951 }
1952 break;
1953 case Primitive::kPrimLong:
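       // As with the 64-bit peek, a 64-bit poke is two 32-bit stores and is not atomic.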
1954 if (value_loc.IsConstant()) {
1955 int64_t value = value_loc.GetConstant()->AsLongConstant()->GetValue();
1956 __ movl(Address(address, 0), Immediate(Low32Bits(value)));
1957 __ movl(Address(address, 4), Immediate(High32Bits(value)));
1958 } else {
1959 __ movl(Address(address, 0), value_loc.AsRegisterPairLow<Register>());
1960 __ movl(Address(address, 4), value_loc.AsRegisterPairHigh<Register>());
1961 }
1962 break;
1963 default:
1964 LOG(FATAL) << "Type not recognized for poke: " << size;
1965 UNREACHABLE();
1966 }
1967}
1968
1969void IntrinsicLocationsBuilderX86::VisitMemoryPokeByte(HInvoke* invoke) {
1970 CreateLongIntToVoidLocations(arena_, Primitive::kPrimByte, invoke);
1971}
1972
1973void IntrinsicCodeGeneratorX86::VisitMemoryPokeByte(HInvoke* invoke) {
1974 GenPoke(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
1975}
1976
1977void IntrinsicLocationsBuilderX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
1978 CreateLongIntToVoidLocations(arena_, Primitive::kPrimInt, invoke);
1979}
1980
1981void IntrinsicCodeGeneratorX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
1982 GenPoke(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
1983}
1984
1985void IntrinsicLocationsBuilderX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
1986 CreateLongIntToVoidLocations(arena_, Primitive::kPrimLong, invoke);
1987}
1988
1989void IntrinsicCodeGeneratorX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
1990 GenPoke(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
1991}
1992
1993void IntrinsicLocationsBuilderX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
1994 CreateLongIntToVoidLocations(arena_, Primitive::kPrimShort, invoke);
1995}
1996
1997void IntrinsicCodeGeneratorX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
1998 GenPoke(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
1999}
2000
2001void IntrinsicLocationsBuilderX86::VisitThreadCurrentThread(HInvoke* invoke) {
2002 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2003 LocationSummary::kNoCall,
2004 kIntrinsified);
2005 locations->SetOut(Location::RequiresRegister());
2006}
2007
2008void IntrinsicCodeGeneratorX86::VisitThreadCurrentThread(HInvoke* invoke) {
2009 Register out = invoke->GetLocations()->Out().AsRegister<Register>();
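       // On x86 the FS segment base points at the current Thread, so the managed peer is a
       // single fs-relative load at a fixed offset.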
Andreas Gampe542451c2016-07-26 09:02:02 -07002010 GetAssembler()->fs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86PointerSize>()));
Mark Mendell09ed1a32015-03-25 08:30:06 -04002011}
2012
Roland Levillain0d5a2812015-11-13 10:07:31 +00002013static void GenUnsafeGet(HInvoke* invoke,
2014 Primitive::Type type,
2015 bool is_volatile,
2016 CodeGeneratorX86* codegen) {
2017 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
2018 LocationSummary* locations = invoke->GetLocations();
2019 Location base_loc = locations->InAt(1);
2020 Register base = base_loc.AsRegister<Register>();
2021 Location offset_loc = locations->InAt(2);
2022 Register offset = offset_loc.AsRegisterPairLow<Register>();
2023 Location output_loc = locations->Out();
Mark Mendell09ed1a32015-03-25 08:30:06 -04002024
2025 switch (type) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002026 case Primitive::kPrimInt: {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002027 Register output = output_loc.AsRegister<Register>();
2028 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
Roland Levillain7c1559a2015-12-15 10:55:36 +00002029 break;
2030 }
2031
2032 case Primitive::kPrimNot: {
2033 Register output = output_loc.AsRegister<Register>();
2034 if (kEmitCompilerReadBarrier) {
2035 if (kUseBakerReadBarrier) {
Sang, Chunlei0fcd2b82016-04-05 17:12:59 +08002036 Address src(base, offset, ScaleFactor::TIMES_1, 0);
2037 codegen->GenerateReferenceLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00002038 invoke, output_loc, base, src, /* needs_null_check */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00002039 } else {
2040 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
2041 codegen->GenerateReadBarrierSlow(
2042 invoke, output_loc, output_loc, base_loc, 0U, offset_loc);
2043 }
2044 } else {
2045 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
2046 __ MaybeUnpoisonHeapReference(output);
Roland Levillain4d027112015-07-01 15:41:14 +01002047 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04002048 break;
Roland Levillain4d027112015-07-01 15:41:14 +01002049 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04002050
2051 case Primitive::kPrimLong: {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002052 Register output_lo = output_loc.AsRegisterPairLow<Register>();
2053 Register output_hi = output_loc.AsRegisterPairHigh<Register>();
Mark Mendell09ed1a32015-03-25 08:30:06 -04002054 if (is_volatile) {
2055 // Need to use a XMM to read atomically.
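       // A single 8-byte movsd load cannot tear the way a pair of 32-bit loads can; the
       // halves are then extracted with movd and a psrlq by 32.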
2056 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
2057 __ movsd(temp, Address(base, offset, ScaleFactor::TIMES_1, 0));
2058 __ movd(output_lo, temp);
2059 __ psrlq(temp, Immediate(32));
2060 __ movd(output_hi, temp);
2061 } else {
2062 __ movl(output_lo, Address(base, offset, ScaleFactor::TIMES_1, 0));
2063 __ movl(output_hi, Address(base, offset, ScaleFactor::TIMES_1, 4));
2064 }
2065 }
2066 break;
2067
2068 default:
2069 LOG(FATAL) << "Unsupported op size " << type;
2070 UNREACHABLE();
2071 }
2072}
2073
Roland Levillain7c1559a2015-12-15 10:55:36 +00002074static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
2075 HInvoke* invoke,
2076 Primitive::Type type,
2077 bool is_volatile) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002078 bool can_call = kEmitCompilerReadBarrier &&
2079 (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
2080 invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002081 LocationSummary* locations = new (arena) LocationSummary(invoke,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002082 (can_call
2083 ? LocationSummary::kCallOnSlowPath
2084 : LocationSummary::kNoCall),
Mark Mendell09ed1a32015-03-25 08:30:06 -04002085 kIntrinsified);
Vladimir Marko70e97462016-08-09 11:04:26 +01002086 if (can_call && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002087 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01002088 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04002089 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
2090 locations->SetInAt(1, Location::RequiresRegister());
2091 locations->SetInAt(2, Location::RequiresRegister());
Roland Levillain7c1559a2015-12-15 10:55:36 +00002092 if (type == Primitive::kPrimLong) {
Mark Mendell09ed1a32015-03-25 08:30:06 -04002093 if (is_volatile) {
2094 // Need to use XMM to read volatile.
2095 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain3d312422016-06-23 13:53:42 +01002096 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002097 } else {
2098 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2099 }
2100 } else {
Roland Levillain3d312422016-06-23 13:53:42 +01002101 locations->SetOut(Location::RequiresRegister(),
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002102 (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
Mark Mendell09ed1a32015-03-25 08:30:06 -04002103 }
2104}
2105
2106void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002107 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002108}
2109void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002110 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002111}
2112void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002113 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002114}
2115void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002116 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002117}
2118void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002119 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002120}
2121void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002122 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002123}
2124
2125
2126void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002127 GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002128}
2129void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002130 GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002131}
2132void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002133 GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002134}
2135void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002136 GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002137}
2138void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002139 GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002140}
2141void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002142 GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002143}
2144
2145
2146static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* arena,
2147 Primitive::Type type,
2148 HInvoke* invoke,
2149 bool is_volatile) {
2150 LocationSummary* locations = new (arena) LocationSummary(invoke,
2151 LocationSummary::kNoCall,
2152 kIntrinsified);
2153 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
2154 locations->SetInAt(1, Location::RequiresRegister());
2155 locations->SetInAt(2, Location::RequiresRegister());
2156 locations->SetInAt(3, Location::RequiresRegister());
2157 if (type == Primitive::kPrimNot) {
2158 // Need temp registers for card-marking.
Roland Levillain4d027112015-07-01 15:41:14 +01002159 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Mark Mendell09ed1a32015-03-25 08:30:06 -04002160 // Ensure the value is in a byte register.
2161 locations->AddTemp(Location::RegisterLocation(ECX));
2162 } else if (type == Primitive::kPrimLong && is_volatile) {
2163 locations->AddTemp(Location::RequiresFpuRegister());
2164 locations->AddTemp(Location::RequiresFpuRegister());
2165 }
2166}
2167
2168void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002169 CreateIntIntIntIntToVoidPlusTempsLocations(
2170 arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002171}
2172void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002173 CreateIntIntIntIntToVoidPlusTempsLocations(
2174 arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002175}
2176void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002177 CreateIntIntIntIntToVoidPlusTempsLocations(
2178 arena_, Primitive::kPrimInt, invoke, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002179}
2180void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002181 CreateIntIntIntIntToVoidPlusTempsLocations(
2182 arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002183}
2184void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002185 CreateIntIntIntIntToVoidPlusTempsLocations(
2186 arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002187}
2188void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002189 CreateIntIntIntIntToVoidPlusTempsLocations(
2190 arena_, Primitive::kPrimNot, invoke, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002191}
2192void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002193 CreateIntIntIntIntToVoidPlusTempsLocations(
2194 arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002195}
2196void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002197 CreateIntIntIntIntToVoidPlusTempsLocations(
2198 arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002199}
2200void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002201 CreateIntIntIntIntToVoidPlusTempsLocations(
2202 arena_, Primitive::kPrimLong, invoke, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002203}
2204
 2205// The "ordered" variants need no extra code here: they only require an AnyStore barrier,
 2206// which the x86 memory model already provides.
2207static void GenUnsafePut(LocationSummary* locations,
2208 Primitive::Type type,
2209 bool is_volatile,
2210 CodeGeneratorX86* codegen) {
Roland Levillainb488b782015-10-22 11:38:49 +01002211 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04002212 Register base = locations->InAt(1).AsRegister<Register>();
2213 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
2214 Location value_loc = locations->InAt(3);
2215
2216 if (type == Primitive::kPrimLong) {
2217 Register value_lo = value_loc.AsRegisterPairLow<Register>();
2218 Register value_hi = value_loc.AsRegisterPairHigh<Register>();
2219 if (is_volatile) {
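       // Pack the two 32-bit halves into one XMM register so the store below is a single
       // 8-byte movsd that cannot tear.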
2220 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
2221 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
2222 __ movd(temp1, value_lo);
2223 __ movd(temp2, value_hi);
2224 __ punpckldq(temp1, temp2);
2225 __ movsd(Address(base, offset, ScaleFactor::TIMES_1, 0), temp1);
2226 } else {
2227 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
2228 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
2229 }
Roland Levillain4d027112015-07-01 15:41:14 +01002230 } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
2231 Register temp = locations->GetTemp(0).AsRegister<Register>();
2232 __ movl(temp, value_loc.AsRegister<Register>());
2233 __ PoisonHeapReference(temp);
2234 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002235 } else {
2236 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
2237 }
2238
2239 if (is_volatile) {
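       // x86-TSO already orders load-load, load-store and store-store; the fence supplies
       // the store-load barrier that a volatile store requires.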
Mark P Mendell17077d82015-12-16 19:15:59 +00002240 codegen->MemoryFence();
Mark Mendell09ed1a32015-03-25 08:30:06 -04002241 }
2242
2243 if (type == Primitive::kPrimNot) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002244 bool value_can_be_null = true; // TODO: Worth finding out this information?
Mark Mendell09ed1a32015-03-25 08:30:06 -04002245 codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
2246 locations->GetTemp(1).AsRegister<Register>(),
2247 base,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002248 value_loc.AsRegister<Register>(),
2249 value_can_be_null);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002250 }
2251}
2252
2253void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002254 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002255}
2256void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002257 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002258}
2259void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002260 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002261}
2262void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002263 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002264}
2265void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002266 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002267}
2268void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002269 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002270}
2271void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002272 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002273}
2274void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002275 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002276}
2277void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002278 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002279}
2280
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002281static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena,
2282 Primitive::Type type,
Mark Mendell58d25fd2015-04-03 14:52:31 -04002283 HInvoke* invoke) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002284 bool can_call = kEmitCompilerReadBarrier &&
2285 kUseBakerReadBarrier &&
2286 (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002287 LocationSummary* locations = new (arena) LocationSummary(invoke,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002288 (can_call
2289 ? LocationSummary::kCallOnSlowPath
2290 : LocationSummary::kNoCall),
Mark Mendell58d25fd2015-04-03 14:52:31 -04002291 kIntrinsified);
2292 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
2293 locations->SetInAt(1, Location::RequiresRegister());
 2294  // Offset is a long, but in 32-bit mode we only need the low word.
2295 // Can we update the invoke here to remove a TypeConvert to Long?
2296 locations->SetInAt(2, Location::RequiresRegister());
2297 // Expected value must be in EAX or EDX:EAX.
2298 // For long, new value must be in ECX:EBX.
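       // These are the implicit operands of the CMPXCHG family: CMPXCHG compares against
       // EAX, and CMPXCHG8B compares against EDX:EAX with ECX:EBX as the replacement value.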
2299 if (type == Primitive::kPrimLong) {
2300 locations->SetInAt(3, Location::RegisterPairLocation(EAX, EDX));
2301 locations->SetInAt(4, Location::RegisterPairLocation(EBX, ECX));
2302 } else {
2303 locations->SetInAt(3, Location::RegisterLocation(EAX));
2304 locations->SetInAt(4, Location::RequiresRegister());
2305 }
2306
2307 // Force a byte register for the output.
2308 locations->SetOut(Location::RegisterLocation(EAX));
2309 if (type == Primitive::kPrimNot) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002310 // Need temporary registers for card-marking, and possibly for
2311 // (Baker) read barrier.
Roland Levillainb488b782015-10-22 11:38:49 +01002312 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Mark Mendell58d25fd2015-04-03 14:52:31 -04002313 // Need a byte register for marking.
2314 locations->AddTemp(Location::RegisterLocation(ECX));
2315 }
2316}
2317
2318void IntrinsicLocationsBuilderX86::VisitUnsafeCASInt(HInvoke* invoke) {
2319 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimInt, invoke);
2320}
2321
2322void IntrinsicLocationsBuilderX86::VisitUnsafeCASLong(HInvoke* invoke) {
2323 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimLong, invoke);
2324}
2325
2326void IntrinsicLocationsBuilderX86::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002327 // The only read barrier implementation supporting the
2328 // UnsafeCASObject intrinsic is the Baker-style read barriers.
2329 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
Roland Levillain391b8662015-12-18 11:43:38 +00002330 return;
2331 }
2332
Mark Mendell58d25fd2015-04-03 14:52:31 -04002333 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimNot, invoke);
2334}
2335
2336static void GenCAS(Primitive::Type type, HInvoke* invoke, CodeGeneratorX86* codegen) {
Roland Levillainb488b782015-10-22 11:38:49 +01002337 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
Mark Mendell58d25fd2015-04-03 14:52:31 -04002338 LocationSummary* locations = invoke->GetLocations();
2339
2340 Register base = locations->InAt(1).AsRegister<Register>();
2341 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
2342 Location out = locations->Out();
2343 DCHECK_EQ(out.AsRegister<Register>(), EAX);
2344
Roland Levillaina1aa3b12016-10-26 13:03:38 +01002345 // The address of the field within the holding object.
2346 Address field_addr(base, offset, ScaleFactor::TIMES_1, 0);
2347
  if (type == Primitive::kPrimNot) {
    // The only read barrier implementation supporting the
    // UnsafeCASObject intrinsic is the Baker-style read barrier.
    DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

    Location temp1_loc = locations->GetTemp(0);
    Register temp1 = temp1_loc.AsRegister<Register>();
    Register temp2 = locations->GetTemp(1).AsRegister<Register>();

    Register expected = locations->InAt(3).AsRegister<Register>();
    // Ensure `expected` is in EAX (required by the CMPXCHG instruction).
    DCHECK_EQ(expected, EAX);
    Register value = locations->InAt(4).AsRegister<Register>();

    // Mark the card for the object, assuming the new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp1, temp2, base, value, value_can_be_null);

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      // Need to make sure the reference stored in the field is a to-space
      // one before attempting the CAS or the CAS could fail incorrectly.
      codegen->GenerateReferenceLoadWithBakerReadBarrier(
          invoke,
          temp1_loc,  // Output location unused; only serves as a temporary within the read barrier.
          base,
          field_addr,
          /* needs_null_check */ false,
          /* always_update_field */ true,
          &temp2);
    }

    bool base_equals_value = (base == value);
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value` to a temporary register. This way, poisoning
        // `value` won't invalidate `base`.
        value = temp1;
        __ movl(value, base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (EAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value, expected);
      DCHECK_NE(base, expected);

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(value);
    }

    __ LockCmpxchgl(field_addr, value);

    // LOCK CMPXCHG has full barrier semantics, and we don't need
    // scheduling barriers at this time.

    // Convert ZF into the Boolean result.
    __ setb(kZero, out.AsRegister<Register>());
    __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value` has been moved to a temporary register, no need to
        // unpoison it.
      } else {
        // Ensure `value` is different from `out`, so that unpoisoning
        // the former does not invalidate the latter.
        DCHECK_NE(value, out.AsRegister<Register>());
        __ UnpoisonHeapReference(value);
      }
      // Do not unpoison the reference contained in register
      // `expected`, as it is the same as register `out` (EAX).
    }
  } else {
    if (type == Primitive::kPrimInt) {
      // Ensure the expected value is in EAX (required by the CMPXCHG
      // instruction).
      DCHECK_EQ(locations->InAt(3).AsRegister<Register>(), EAX);
      __ LockCmpxchgl(field_addr, locations->InAt(4).AsRegister<Register>());
    } else if (type == Primitive::kPrimLong) {
      // Ensure the expected value is in EAX:EDX and that the new
      // value is in EBX:ECX (required by the CMPXCHG8B instruction).
      DCHECK_EQ(locations->InAt(3).AsRegisterPairLow<Register>(), EAX);
      DCHECK_EQ(locations->InAt(3).AsRegisterPairHigh<Register>(), EDX);
      DCHECK_EQ(locations->InAt(4).AsRegisterPairLow<Register>(), EBX);
      DCHECK_EQ(locations->InAt(4).AsRegisterPairHigh<Register>(), ECX);
      __ LockCmpxchg8b(field_addr);
    } else {
      LOG(FATAL) << "Unexpected CAS type " << type;
    }

    // LOCK CMPXCHG/LOCK CMPXCHG8B have full barrier semantics, and we
    // don't need scheduling barriers at this time.

    // Convert ZF into the Boolean result.
    __ setb(kZero, out.AsRegister<Register>());
    __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
  }
}
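
// Note: the three GenCAS users below back sun.misc.Unsafe.compareAndSwapInt,
// compareAndSwapLong and compareAndSwapObject; each returns true iff the
// field held the expected value and was atomically updated to the new one.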

void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCAS(Primitive::kPrimInt, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCAS(Primitive::kPrimLong, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barrier.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  GenCAS(Primitive::kPrimNot, invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitIntegerReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask,
                     X86Assembler* assembler) {
  Immediate imm_shift(shift);
  Immediate imm_mask(mask);
  __ movl(temp, reg);
  __ shrl(reg, imm_shift);
  __ andl(temp, imm_mask);
  __ andl(reg, imm_mask);
  __ shll(temp, imm_shift);
  __ orl(reg, temp);
}
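
// Each SwapBits round is equivalent to this C++ (an illustrative sketch of
// what the emitted code computes, not an ART helper):
//   uint32_t SwapBitsRound(uint32_t x, int shift, uint32_t mask) {
//     return ((x >> shift) & mask) | ((x & mask) << shift);
//   }
// With shift == 1 and mask == 0x55555555 it exchanges adjacent bit pairs; the
// three rounds below, composed with BSWAP, yield a full 32-bit bit reversal,
// e.g. mapping 0x00000001 to 0x80000000.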

void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register reg = locations->InAt(0).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  /*
   * Use one bswap instruction to reverse byte order first and then use 3 rounds of
   * swapping bits to reverse bits in a number x. Using bswap saves instructions
   * compared to the generic luni implementation, which needs 5 rounds of swapping bits.
   * x = bswap x
   * x = (x & 0x55555555) << 1 | (x >> 1) & 0x55555555;
   * x = (x & 0x33333333) << 2 | (x >> 2) & 0x33333333;
   * x = (x & 0x0F0F0F0F) << 4 | (x >> 4) & 0x0F0F0F0F;
   */
  __ bswapl(reg);
  SwapBits(reg, temp, 1, 0x55555555, assembler);
  SwapBits(reg, temp, 2, 0x33333333, assembler);
  SwapBits(reg, temp, 4, 0x0f0f0f0f, assembler);
}

void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Register reg_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  // We want to swap high/low, then bswap each one, and then do the same
  // as a 32-bit reverse.
  // Exchange high and low.
  __ movl(temp, reg_low);
  __ movl(reg_low, reg_high);
  __ movl(reg_high, temp);

  // Bit-reverse the low word.
  __ bswapl(reg_low);
  SwapBits(reg_low, temp, 1, 0x55555555, assembler);
  SwapBits(reg_low, temp, 2, 0x33333333, assembler);
  SwapBits(reg_low, temp, 4, 0x0f0f0f0f, assembler);

  // Bit-reverse the high word.
  __ bswapl(reg_high);
  SwapBits(reg_high, temp, 1, 0x55555555, assembler);
  SwapBits(reg_high, temp, 2, 0x33333333, assembler);
  SwapBits(reg_high, temp, 4, 0x0f0f0f0f, assembler);
}

static void CreateBitCountLocations(
    ArenaAllocator* arena, CodeGeneratorX86* codegen, HInvoke* invoke, bool is_long) {
  if (!codegen->GetInstructionSetFeatures().HasPopCnt()) {
    // Do nothing if there is no popcnt support. This results in generating
    // a call for the intrinsic rather than direct code.
    return;
  }
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->AddTemp(Location::RequiresRegister());
  }
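  // Location::Any() lets POPCNT take its input either from a register or
  // straight from memory (POPCNT accepts a register-or-memory source), so a
  // spilled input does not have to be reloaded first.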
  locations->SetInAt(0, Location::Any());
  locations->SetOut(Location::RequiresRegister());
}

static void GenBitCount(X86Assembler* assembler,
                        CodeGeneratorX86* codegen,
                        HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    int32_t result = is_long
        ? POPCOUNT(static_cast<uint64_t>(value))
        : POPCOUNT(static_cast<uint32_t>(value));
    codegen->Load32BitValue(out, result);
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ popcntl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ popcntl(out, Address(ESP, src.GetStackIndex()));
    }
  } else {
    // The 64-bit case needs to worry about two parts.
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    if (src.IsRegisterPair()) {
      __ popcntl(temp, src.AsRegisterPairLow<Register>());
      __ popcntl(out, src.AsRegisterPairHigh<Register>());
    } else {
      DCHECK(src.IsDoubleStackSlot());
      __ popcntl(temp, Address(ESP, src.GetStackIndex()));
      __ popcntl(out, Address(ESP, src.GetHighStackIndex(kX86WordSize)));
    }
    __ addl(out, temp);
  }
}
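
// A minimal C++ sketch of the 64-bit split above (illustrative only, not an
// ART helper):
//   uint32_t PopCount64(uint64_t x) {
//     return __builtin_popcount(static_cast<uint32_t>(x)) +
//            __builtin_popcount(static_cast<uint32_t>(x >> 32));
//   }
// The result is at most 64, so the 32-bit addition cannot overflow.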

void IntrinsicLocationsBuilderX86::VisitIntegerBitCount(HInvoke* invoke) {
  CreateBitCountLocations(arena_, codegen_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(GetAssembler(), codegen_, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongBitCount(HInvoke* invoke) {
  CreateBitCountLocations(arena_, codegen_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(GetAssembler(), codegen_, invoke, /* is_long */ true);
}

static void CreateLeadingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenLeadingZeros(X86Assembler* assembler,
                            CodeGeneratorX86* codegen,
                            HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value));
    }
    codegen->Load32BitValue(out, value);
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsrl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsrl(out, Address(ESP, src.GetStackIndex()));
    }

    // BSR sets ZF if the input was zero; in that case the output is undefined.
    NearLabel all_zeroes, done;
    __ j(kEqual, &all_zeroes);

    // Correct the result from BSR to get the final CLZ result.
    __ xorl(out, Immediate(31));
    __ jmp(&done);

    // Fix the zero case with the expected result.
    __ Bind(&all_zeroes);
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // The 64-bit case needs to worry about both parts of the register.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel handle_low, done, all_zeroes;

  // Is the high word zero?
  __ testl(src_hi, src_hi);
  __ j(kEqual, &handle_low);

  // High word is not zero. We know that the BSR result is defined in this case.
  __ bsrl(out, src_hi);

  // Correct the result from BSR to get the final CLZ result.
  __ xorl(out, Immediate(31));
  __ jmp(&done);

  // High word was zero. We have to compute the low word count and add 32.
  __ Bind(&handle_low);
  __ bsrl(out, src_lo);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Use an XOR to both correct the result and add 32.
  __ xorl(out, Immediate(63));
  __ jmp(&done);

  // All zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}
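
// Why XOR works as the BSR fix-up above (a sketch of the identity, not ART
// code): BSR yields the index i of the most significant set bit, while CLZ
// wants 31 - i. Since 0 <= i <= 31 and 31 is 0b11111, the subtraction never
// borrows, so 31 - i == 31 ^ i. Similarly, in the 64-bit path with a zero
// high word, the answer is 32 + (31 - i) == 63 - i == 63 ^ i; e.g. for the
// value 1, i == 0 and the result is 63.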

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long */ true);
}

static void CreateTrailingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenTrailingZeros(X86Assembler* assembler,
                             CodeGeneratorX86* codegen,
                             HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value));
    }
    codegen->Load32BitValue(out, value);
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsfl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsfl(out, Address(ESP, src.GetStackIndex()));
    }

    // BSF sets ZF if the input was zero; in that case the output is undefined.
    NearLabel done;
    __ j(kNotEqual, &done);

    // Fix the zero case with the expected result.
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // The 64-bit case needs to worry about both parts of the register.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel done, all_zeroes;

  // If the low word is zero, then ZF will be set. If not, we have the answer.
  __ bsfl(out, src_lo);
  __ j(kNotEqual, &done);

  // Low word was zero. We have to compute the high word count and add 32.
  __ bsfl(out, src_hi);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Add 32 to account for the low word being zero.
  __ addl(out, Immediate(32));
  __ jmp(&done);

  // All zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}
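
// Unlike BSR, BSF already returns the index of the lowest set bit, which is
// exactly the CTZ result, so no arithmetic fix-up is needed; only zero inputs
// require patching. For example, for the 64-bit value 1 << 40 the low word is
// zero, BSF on the high word yields 8, and the ADD above produces 8 + 32 = 40.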

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateTrailingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateTrailingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long */ true);
}

void IntrinsicLocationsBuilderX86::VisitReferenceGetReferent(HInvoke* invoke) {
  if (kEmitCompilerReadBarrier) {
    // Do not intrinsify this call with the read barrier configuration.
    return;
  }
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitReferenceGetReferent(HInvoke* invoke) {
  DCHECK(!kEmitCompilerReadBarrier);
  LocationSummary* locations = invoke->GetLocations();
  X86Assembler* assembler = GetAssembler();

  Register obj = locations->InAt(0).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  // Load ArtMethod first.
  HInvokeStaticOrDirect* invoke_direct = invoke->AsInvokeStaticOrDirect();
  DCHECK(invoke_direct != nullptr);
  Location temp_loc = codegen_->GenerateCalleeMethodStaticOrDirectCall(
      invoke_direct, locations->GetTemp(0));
  DCHECK(temp_loc.Equals(locations->GetTemp(0)));
  Register temp = temp_loc.AsRegister<Register>();

  // Now get the declaring class.
  __ movl(temp, Address(temp, ArtMethod::DeclaringClassOffset().Int32Value()));

  uint32_t slow_path_flag_offset = codegen_->GetReferenceSlowFlagOffset();
  uint32_t disable_flag_offset = codegen_->GetReferenceDisableFlagOffset();
  DCHECK_NE(slow_path_flag_offset, 0u);
  DCHECK_NE(disable_flag_offset, 0u);
  DCHECK_NE(slow_path_flag_offset, disable_flag_offset);

  // Check the static flags that prevent us from using the intrinsic.
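  // When the two byte-sized flags happen to be adjacent in memory (the
  // slow-path flag directly after the disable flag), a single 16-bit compare
  // reads both bytes at once; a non-zero result means at least one is set.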
  if (slow_path_flag_offset == disable_flag_offset + 1) {
    __ cmpw(Address(temp, disable_flag_offset), Immediate(0));
    __ j(kNotEqual, slow_path->GetEntryLabel());
  } else {
    __ cmpb(Address(temp, disable_flag_offset), Immediate(0));
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ cmpb(Address(temp, slow_path_flag_offset), Immediate(0));
    __ j(kNotEqual, slow_path->GetEntryLabel());
  }

  // Fast path.
  __ movl(out, Address(obj, mirror::Reference::ReferentOffset().Int32Value()));
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ MaybeUnpoisonHeapReference(out);
  __ Bind(slow_path->GetExitLabel());
}

static bool IsSameInput(HInstruction* instruction, size_t input0, size_t input1) {
  return instruction->InputAt(input0) == instruction->InputAt(input1);
}

// Compute the base address for the System.arraycopy intrinsic in `base`.
static void GenSystemArrayCopyBaseAddress(X86Assembler* assembler,
                                          Primitive::Type type,
                                          const Register& array,
                                          const Location& pos,
                                          const Register& base) {
  // This routine is only used by the SystemArrayCopy intrinsic at the
  // moment. We could allow other types, e.g. Primitive::kPrimChar, to
  // implement the SystemArrayCopyChar intrinsic.
  DCHECK_EQ(type, Primitive::kPrimNot);
  const int32_t element_size = Primitive::ComponentSize(type);
  const ScaleFactor scale_factor = static_cast<ScaleFactor>(Primitive::ComponentSizeShift(type));
  const uint32_t data_offset = mirror::Array::DataOffset(element_size).Uint32Value();

  if (pos.IsConstant()) {
    int32_t constant = pos.GetConstant()->AsIntConstant()->GetValue();
    __ leal(base, Address(array, element_size * constant + data_offset));
  } else {
    __ leal(base, Address(array, pos.AsRegister<Register>(), scale_factor, data_offset));
  }
}

// Compute the end source address for the System.arraycopy intrinsic in `end`.
static void GenSystemArrayCopyEndAddress(X86Assembler* assembler,
                                         Primitive::Type type,
                                         const Location& copy_length,
                                         const Register& base,
                                         const Register& end) {
  // This routine is only used by the SystemArrayCopy intrinsic at the
  // moment. We could allow other types, e.g. Primitive::kPrimChar, to
  // implement the SystemArrayCopyChar intrinsic.
  DCHECK_EQ(type, Primitive::kPrimNot);
  const int32_t element_size = Primitive::ComponentSize(type);
  const ScaleFactor scale_factor = static_cast<ScaleFactor>(Primitive::ComponentSizeShift(type));

  if (copy_length.IsConstant()) {
    int32_t constant = copy_length.GetConstant()->AsIntConstant()->GetValue();
    __ leal(end, Address(base, element_size * constant));
  } else {
    __ leal(end, Address(base, copy_length.AsRegister<Register>(), scale_factor, 0));
  }
}
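
// The two helpers above compute, with a single LEA each:
//   base = array + data_offset + pos * element_size
//   end  = base + length * element_size
// For instance (assuming 4-byte heap references and a 12-byte array data
// offset, as is typical for 32-bit ART), pos == 3 yields base = array + 24.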

void IntrinsicLocationsBuilderX86::VisitSystemArrayCopy(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // SystemArrayCopy intrinsic is the Baker-style read barrier.
  if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
    return;
  }

  CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
  if (invoke->GetLocations() != nullptr) {
    // Need a byte register for marking.
    invoke->GetLocations()->SetTempAt(1, Location::RegisterLocation(ECX));

    static constexpr size_t kSrc = 0;
    static constexpr size_t kSrcPos = 1;
    static constexpr size_t kDest = 2;
    static constexpr size_t kDestPos = 3;
    static constexpr size_t kLength = 4;

    if (!invoke->InputAt(kSrcPos)->IsIntConstant() &&
        !invoke->InputAt(kDestPos)->IsIntConstant() &&
        !invoke->InputAt(kLength)->IsIntConstant()) {
      if (!IsSameInput(invoke, kSrcPos, kDestPos) &&
          !IsSameInput(invoke, kSrcPos, kLength) &&
          !IsSameInput(invoke, kDestPos, kLength) &&
          !IsSameInput(invoke, kSrc, kDest)) {
        // Not enough registers, make the length also take a stack slot.
        invoke->GetLocations()->SetInAt(kLength, Location::Any());
      }
    }
  }
}

void IntrinsicCodeGeneratorX86::VisitSystemArrayCopy(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // SystemArrayCopy intrinsic is the Baker-style read barrier.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length_arg = locations->InAt(4);
  Location length = length_arg;
  Location temp1_loc = locations->GetTemp(0);
  Register temp1 = temp1_loc.AsRegister<Register>();
  Location temp2_loc = locations->GetTemp(1);
  Register temp2 = temp2_loc.AsRegister<Register>();

  SlowPathCode* intrinsic_slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(intrinsic_slow_path);

  NearLabel conditions_on_positions_validated;
  SystemArrayCopyOptimizations optimizations(invoke);

  // If source and destination are the same, we go to the slow path if a
  // simple forward copy would not be safe (i.e. when src_pos < dest_pos).
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      if (optimizations.GetDestinationIsSource()) {
        // Checked when building locations.
        DCHECK_GE(src_pos_constant, dest_pos_constant);
      } else if (src_pos_constant < dest_pos_constant) {
        __ cmpl(src, dest);
        __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
      }
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ cmpl(src, dest);
        __ j(kNotEqual, &conditions_on_positions_validated);
      }
      __ cmpl(dest_pos.AsRegister<Register>(), Immediate(src_pos_constant));
      __ j(kGreater, intrinsic_slow_path->GetEntryLabel());
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ cmpl(src, dest);
      __ j(kNotEqual, &conditions_on_positions_validated);
    }
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ cmpl(src_pos.AsRegister<Register>(), Immediate(dest_pos_constant));
      __ j(kLess, intrinsic_slow_path->GetEntryLabel());
    } else {
      __ cmpl(src_pos.AsRegister<Register>(), dest_pos.AsRegister<Register>());
      __ j(kLess, intrinsic_slow_path->GetEntryLabel());
    }
  }

  __ Bind(&conditions_on_positions_validated);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ testl(src, src);
    __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null.
    __ testl(dest, dest);
    __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
  }

  Location temp3_loc = locations->GetTemp(2);
  Register temp3 = temp3_loc.AsRegister<Register>();
  if (length.IsStackSlot()) {
    __ movl(temp3, Address(ESP, length.GetStackIndex()));
    length = Location::RegisterLocation(temp3);
  }

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
    __ j(kLess, intrinsic_slow_path->GetEntryLabel());
  }

  // Validity checks: source.
  CheckPosition(assembler,
                src_pos,
                src,
                length,
                intrinsic_slow_path,
                temp1,
                optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckPosition(assembler,
                dest_pos,
                dest,
                length,
                intrinsic_slow_path,
                temp1,
                optimizations.GetCountIsDestinationLength());

  if (!optimizations.GetDoesNotNeedTypeCheck()) {
    // Check whether all elements of the source array are assignable to the component
    // type of the destination array. We do two checks: the classes are the same,
    // or the destination is Object[]. If neither of these checks succeeds, we go to
    // the slow path.

    if (!optimizations.GetSourceIsNonPrimitiveArray()) {
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // /* HeapReference<Class> */ temp1 = src->klass_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            invoke, temp1_loc, src, class_offset, /* needs_null_check */ false);
        // Bail out if the source is not a non-primitive array.
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            invoke, temp1_loc, temp1, component_offset, /* needs_null_check */ false);
        __ testl(temp1, temp1);
        __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
        // If heap poisoning is enabled, `temp1` has been unpoisoned
        // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
      } else {
        // /* HeapReference<Class> */ temp1 = src->klass_
        __ movl(temp1, Address(src, class_offset));
        __ MaybeUnpoisonHeapReference(temp1);
        // Bail out if the source is not a non-primitive array.
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ movl(temp1, Address(temp1, component_offset));
        __ testl(temp1, temp1);
        __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
        __ MaybeUnpoisonHeapReference(temp1);
      }
      __ cmpw(Address(temp1, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
    }

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      if (length.Equals(Location::RegisterLocation(temp3))) {
        // When Baker read barriers are enabled, register `temp3`,
        // which in the present case contains the `length` parameter,
        // will be overwritten below. Make the `length` location
        // reference the original stack location; it will be moved
        // back to `temp3` later if necessary.
        DCHECK(length_arg.IsStackSlot());
        length = length_arg;
      }

      // /* HeapReference<Class> */ temp1 = dest->klass_
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          invoke, temp1_loc, dest, class_offset, /* needs_null_check */ false);

      if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
        // Bail out if the destination is not a non-primitive array.
        //
        // Register `temp1` is not trashed by the read barrier emitted
        // by GenerateFieldLoadWithBakerReadBarrier below, as that
        // method produces a call to a ReadBarrierMarkRegX entry point,
        // which saves all potentially live registers, including
        // temporaries such as `temp1`.
        // /* HeapReference<Class> */ temp2 = temp1->component_type_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            invoke, temp2_loc, temp1, component_offset, /* needs_null_check */ false);
        __ testl(temp2, temp2);
        __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
        // If heap poisoning is enabled, `temp2` has been unpoisoned
        // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
        __ cmpw(Address(temp2, primitive_offset), Immediate(Primitive::kPrimNot));
        __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
      }

      // For the same reason given earlier, `temp1` is not trashed by the
      // read barrier emitted by GenerateFieldLoadWithBakerReadBarrier below.
      // /* HeapReference<Class> */ temp2 = src->klass_
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          invoke, temp2_loc, src, class_offset, /* needs_null_check */ false);
      // Note: if heap poisoning is on, we are comparing two unpoisoned references here.
      __ cmpl(temp1, temp2);

      if (optimizations.GetDestinationIsTypedObjectArray()) {
        NearLabel do_copy;
        __ j(kEqual, &do_copy);
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            invoke, temp1_loc, temp1, component_offset, /* needs_null_check */ false);
        // We do not need to emit a read barrier for the following
        // heap reference load, as `temp1` is only used in a
        // comparison with null below, and this reference is not
        // kept afterwards.
        __ cmpl(Address(temp1, super_offset), Immediate(0));
        __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
        __ Bind(&do_copy);
      } else {
        __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
      }
    } else {
      // Non read barrier code.

      // /* HeapReference<Class> */ temp1 = dest->klass_
      __ movl(temp1, Address(dest, class_offset));
      if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
        __ MaybeUnpoisonHeapReference(temp1);
        // Bail out if the destination is not a non-primitive array.
        // /* HeapReference<Class> */ temp2 = temp1->component_type_
        __ movl(temp2, Address(temp1, component_offset));
        __ testl(temp2, temp2);
        __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
        __ MaybeUnpoisonHeapReference(temp2);
        __ cmpw(Address(temp2, primitive_offset), Immediate(Primitive::kPrimNot));
        __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
        // Re-poison the heap reference to make the compare instruction below
        // compare two poisoned references.
        __ PoisonHeapReference(temp1);
      }

      // Note: if heap poisoning is on, we are comparing two poisoned references here.
      __ cmpl(temp1, Address(src, class_offset));

      if (optimizations.GetDestinationIsTypedObjectArray()) {
        NearLabel do_copy;
        __ j(kEqual, &do_copy);
        __ MaybeUnpoisonHeapReference(temp1);
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ movl(temp1, Address(temp1, component_offset));
        __ MaybeUnpoisonHeapReference(temp1);
        __ cmpl(Address(temp1, super_offset), Immediate(0));
        __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
        __ Bind(&do_copy);
      } else {
        __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
      }
    }
  } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
    DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
    // Bail out if the source is not a non-primitive array.
    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      // /* HeapReference<Class> */ temp1 = src->klass_
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          invoke, temp1_loc, src, class_offset, /* needs_null_check */ false);
      // /* HeapReference<Class> */ temp1 = temp1->component_type_
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          invoke, temp1_loc, temp1, component_offset, /* needs_null_check */ false);
      __ testl(temp1, temp1);
      __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
      // If heap poisoning is enabled, `temp1` has been unpoisoned
      // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
    } else {
      // /* HeapReference<Class> */ temp1 = src->klass_
      __ movl(temp1, Address(src, class_offset));
      __ MaybeUnpoisonHeapReference(temp1);
      // /* HeapReference<Class> */ temp1 = temp1->component_type_
      __ movl(temp1, Address(temp1, component_offset));
      __ testl(temp1, temp1);
      __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp1);
    }
    __ cmpw(Address(temp1, primitive_offset), Immediate(Primitive::kPrimNot));
    __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
  }

  const Primitive::Type type = Primitive::kPrimNot;
  const int32_t element_size = Primitive::ComponentSize(type);

  // Compute the base source address in `temp1`.
  GenSystemArrayCopyBaseAddress(GetAssembler(), type, src, src_pos, temp1);

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // If it is needed (in the case of the fast-path loop), the base
    // destination address is computed later, as `temp2` is used for
    // intermediate computations.

    // Compute the end source address in `temp3`.
    if (length.IsStackSlot()) {
      // Location `length` is again pointing at a stack slot, as
      // register `temp3` (which contained the length parameter
      // earlier) has been overwritten; restore it now.
      DCHECK(length.Equals(length_arg));
      __ movl(temp3, Address(ESP, length.GetStackIndex()));
      length = Location::RegisterLocation(temp3);
    }
    GenSystemArrayCopyEndAddress(GetAssembler(), type, length, temp1, temp3);

    // SystemArrayCopy implementation for Baker read barriers (see
    // also CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier):
    //
    //   if (src_ptr != end_ptr) {
    //     uint32_t rb_state = LockWord(src->monitor_).ReadBarrierState();
    //     lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //     bool is_gray = (rb_state == ReadBarrier::GrayState());
    //     if (is_gray) {
    //       // Slow-path copy.
    //       for (size_t i = 0; i != length; ++i) {
    //         dest_array[dest_pos + i] =
    //             MaybePoison(ReadBarrier::Mark(MaybeUnpoison(src_array[src_pos + i])));
    //       }
    //     } else {
    //       // Fast-path copy.
    //       do {
    //         *dest_ptr++ = *src_ptr++;
    //       } while (src_ptr != end_ptr);
    //     }
    //   }

    NearLabel loop, done;

    // Don't enter the copy loop if `length == 0`.
    __ cmpl(temp1, temp3);
    __ j(kEqual, &done);

    // Given the numeric representation, it's enough to check the low bit of the rb_state.
    static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
    constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
    constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
    constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
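    // In other words, `test_value` selects the read barrier state bit within
    // the byte at offset `monitor_offset + gray_byte_position`, so the TESTB
    // below can inspect a single byte instead of loading the whole lock word.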

    // if (rb_state == ReadBarrier::GrayState())
    //   goto slow_path;
    // At this point, just do the "if" and make sure that flags are preserved until the branch.
    __ testb(Address(src, monitor_offset + gray_byte_position), Immediate(test_value));

    // Load fence to prevent load-load reordering.
    // Note that this is a no-op, thanks to the x86 memory model.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

    // Slow path used to copy the array when `src` is gray.
    SlowPathCode* read_barrier_slow_path =
        new (GetAllocator()) ReadBarrierSystemArrayCopySlowPathX86(invoke);
    codegen_->AddSlowPath(read_barrier_slow_path);

    // We have done the "if" of the gray bit check above, now branch based on the flags.
    __ j(kNotZero, read_barrier_slow_path->GetEntryLabel());

    // Fast-path copy.
    // Compute the base destination address in `temp2`.
    GenSystemArrayCopyBaseAddress(GetAssembler(), type, dest, dest_pos, temp2);
    // Iterate over the arrays and do a raw copy of the objects. We don't need to
    // poison/unpoison.
    __ Bind(&loop);
    __ pushl(Address(temp1, 0));
    __ cfi().AdjustCFAOffset(4);
    __ popl(Address(temp2, 0));
    __ cfi().AdjustCFAOffset(-4);
    __ addl(temp1, Immediate(element_size));
    __ addl(temp2, Immediate(element_size));
    __ cmpl(temp1, temp3);
    __ j(kNotEqual, &loop);

    __ Bind(read_barrier_slow_path->GetExitLabel());
    __ Bind(&done);
  } else {
    // Non read barrier code.
    // Compute the base destination address in `temp2`.
    GenSystemArrayCopyBaseAddress(GetAssembler(), type, dest, dest_pos, temp2);
    // Compute the end source address in `temp3`.
    GenSystemArrayCopyEndAddress(GetAssembler(), type, length, temp1, temp3);
    // Iterate over the arrays and do a raw copy of the objects. We don't need to
    // poison/unpoison.
    NearLabel loop, done;
    __ cmpl(temp1, temp3);
    __ j(kEqual, &done);
    __ Bind(&loop);
    __ pushl(Address(temp1, 0));
    __ cfi().AdjustCFAOffset(4);
    __ popl(Address(temp2, 0));
    __ cfi().AdjustCFAOffset(-4);
    __ addl(temp1, Immediate(element_size));
    __ addl(temp2, Immediate(element_size));
    __ cmpl(temp1, temp3);
    __ j(kNotEqual, &loop);
    __ Bind(&done);
  }

  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(temp1, temp2, dest, Register(kNoRegister), /* value_can_be_null */ false);

  __ Bind(intrinsic_slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      Location::RegisterLocation(EAX),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void IntrinsicCodeGeneratorX86::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo();
  LocationSummary* locations = invoke->GetLocations();
  X86Assembler* assembler = GetAssembler();

  Register out = locations->Out().AsRegister<Register>();
  InvokeRuntimeCallingConvention calling_convention;
  if (invoke->InputAt(0)->IsConstant()) {
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (value >= info.low && value <= info.high) {
      // Just embed the j.l.Integer in the code.
      ScopedObjectAccess soa(Thread::Current());
      mirror::Object* boxed = info.cache->Get(value + (-info.low));
      DCHECK(boxed != nullptr && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boxed));
      uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(boxed));
      __ movl(out, Immediate(address));
    } else {
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
      __ movl(calling_convention.GetRegisterAt(0), Immediate(address));
      codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
      CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
      __ movl(Address(out, info.value_offset), Immediate(value));
    }
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    // Check the bounds of our cache.
    __ leal(out, Address(in, -info.low));
    __ cmpl(out, Immediate(info.high - info.low + 1));
    NearLabel allocate, done;
    __ j(kAboveEqual, &allocate);
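    // kAboveEqual is an unsigned comparison: for inputs below info.low the
    // LEA above wraps around to a large unsigned value, so this one branch
    // rejects values on both sides of the [info.low, info.high] cache range.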
    // If the value is within the bounds, load the j.l.Integer directly from the array.
    uint32_t data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
    uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
    __ movl(out, Address(out, TIMES_4, data_offset + address));
    __ MaybeUnpoisonHeapReference(out);
    __ jmp(&done);
    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
    __ movl(calling_convention.GetRegisterAt(0), Immediate(address));
    codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    __ movl(Address(out, info.value_offset), in);
    __ Bind(&done);
  }
}

UNIMPLEMENTED_INTRINSIC(X86, MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(X86, FloatIsInfinite)
UNIMPLEMENTED_INTRINSIC(X86, DoubleIsInfinite)
UNIMPLEMENTED_INTRINSIC(X86, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(X86, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(X86, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(X86, LongLowestOneBit)

UNIMPLEMENTED_INTRINSIC(X86, StringStringIndexOf)
UNIMPLEMENTED_INTRINSIC(X86, StringStringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(X86, StringBufferAppend)
UNIMPLEMENTED_INTRINSIC(X86, StringBufferLength)
UNIMPLEMENTED_INTRINSIC(X86, StringBufferToString)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppend)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderLength)
UNIMPLEMENTED_INTRINSIC(X86, StringBuilderToString)

// 1.8.
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(X86)

#undef __

}  // namespace x86
}  // namespace art