/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/managed_register_x86.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86 {

static constexpr int kCurrentMethodStackOffset = 0;
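// EAX carries the ArtMethod* of the callee on entry; GenerateFrameEntry spills it
// to kCurrentMethodStackOffset when the current method is needed.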
static constexpr Register kMethodRegisterArgument = EAX;
static constexpr Register kCoreCalleeSaves[] = { EBP, ESI, EDI };

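// The C2 condition flag (bit 10) of the x87 FPU status word.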
static constexpr int kC2ConditionMask = 0x400;

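// Pseudo-register number (one past EDI) standing in for the return address pushed by
// the call; it is added to the core register mask in the constructor to mimic Quick's
// frame layout.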
static constexpr int kFakeReturnRegister = Register(8);

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT
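// Byte offset of quick entrypoint `x` from the Thread pointer for 32-bit (kX86PointerSize);
// entrypoints are reached through the fs segment register (see GenerateInvokeRuntime below).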
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, x).Int32Value()

class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};

class DivZeroCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    x86_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86);
};

class DivRemMinusOneSlowPathX86 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86(HInstruction* instruction, Register reg, bool is_div)
      : SlowPathCode(instruction), reg_(reg), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (is_div_) {
      __ negl(reg_);
    } else {
      __ movl(reg_, Immediate(0));
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86"; }

 private:
  Register reg_;
  bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86);
};

class BoundsCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86(HBoundsCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }

    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<Register>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<Register>(), array_len);
      if (mirror::kUseStringCompression) {
        __ andl(length_loc.AsRegister<Register>(), Immediate(INT32_MAX));
      }
    }
    x86_codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
};

class SuspendCheckSlowPathX86 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    x86_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
};

class LoadStringSlowPathX86 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86(HLoadString* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(calling_convention.GetRegisterAt(0), Immediate(string_index));
    x86_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    Register method_address = locations->InAt(0).AsRegister<Register>();
    __ movl(Address(method_address, CodeGeneratorX86::kDummy32BitOffset),
            locations->Out().AsRegister<Register>());
    Label* fixup_label = x86_codegen->NewStringBssEntryPatch(instruction_->AsLoadString());
    __ Bind(fixup_label);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
};

class LoadClassSlowPathX86 : public SlowPathCode {
 public:
  LoadClassSlowPathX86(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ movl(calling_convention.GetRegisterAt(0), Immediate(cls_->GetTypeIndex()));
    x86_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage
                                          : kQuickInitializeType,
                               at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_codegen->Move32(out, Location::RegisterLocation(EAX));
    }

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86);
};

class TypeCheckSlowPathX86 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location object_class = instruction_->IsCheckCast() ? locations->GetTemp(0)
                                                        : locations->Out();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->EmitParallelMoves(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        object_class,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimNot);

    if (instruction_->IsInstanceOf()) {
      x86_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<
          kQuickInstanceofNonTrivial, size_t, const mirror::Class*, const mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_codegen->InvokeRuntime(kQuickCheckCast, instruction_, instruction_->GetDexPc(), this);
      CheckEntrypointTypes<kQuickCheckCast, void, const mirror::Class*, const mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
      }
      RestoreLiveRegisters(codegen, locations);

      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
};

class DeoptimizationSlowPathX86 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    x86_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86);
};

class ArraySetSlowPathX86 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86(HInstruction* instruction,
                             Location ref,
                             bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    DCHECK_NE(ref_reg, ESP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in EAX):
    //
    //   EAX <- ref
    //   EAX <- ReadBarrierMark(EAX)
    //   ref <- EAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathX86(HInstruction* instruction,
                                           Location ref,
                                           Register obj,
                                           const Address& field_addr,
                                           bool unpoison_ref_before_marking,
                                           Register temp)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkAndUpdateFieldSlowPathX86"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    DCHECK_NE(ref_reg, ESP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in EAX):
    //
    //   EAX <- ref
    //   EAX <- ReadBarrierMark(EAX)
    //   ref <- EAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp_, ref_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the mutator
    // updates it before us, but that is OK. This is achieved using a
    // strong compare-and-set (CAS) operation with relaxed memory
    // synchronization ordering, where the expected value is the old
    // reference and the desired value is the new reference. This
    // operation is implemented with a 32-bit LOCK CMPXCHGL instruction,
    // which requires the expected value (the old reference) to be in
    // EAX. Save EAX beforehand, and move the expected value (stored
    // in `temp_`) into EAX.
    __ pushl(EAX);
    __ movl(EAX, temp_);

    // Convenience aliases.
    Register base = obj_;
    Register expected = EAX;
    Register value = ref_reg;

    bool base_equals_value = (base == value);
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value` to a temporary register. This way, poisoning
        // `value` won't invalidate `base`.
        value = temp_;
        __ movl(value, base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (EAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value, expected);
      DCHECK_NE(base, expected);

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(value);
    }

    __ LockCmpxchgl(field_addr_, value);

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(value);
      }
      // No need to unpoison `expected` (EAX), as it is overwritten below.
    }

    // Restore EAX.
    __ popl(EAX);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  const Register temp_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86(HInstruction* instruction,
                                         Location out,
                                         Location ref,
                                         Location obj,
                                         uint32_t offset,
                                         Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86::X86Assembler::shll and
          // art::x86::X86Assembler::AddImmediate below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ movl(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(index_reg, Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(index_reg, Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(calling_convention.GetRegisterAt(2), Immediate(offset_));
    }
    x86_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathX86"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(GetAssembler())->  // NOLINT

inline Condition X86Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps signed condition to unsigned condition and FP condition to x86 name.
inline Condition X86UnsignedOrFPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    // Signed to unsigned, and FP to x86 name.
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    // Unsigned remain unchanged.
    case kCondB:  return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA:  return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << XmmRegister(reg);
}

size_t CodeGeneratorX86::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movl(Address(ESP, stack_index), static_cast<Register>(reg_id));
  return kX86WordSize;
}

size_t CodeGeneratorX86::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movl(static_cast<Register>(reg_id), Address(ESP, stack_index));
  return kX86WordSize;
}

size_t CodeGeneratorX86::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(ESP, stack_index), XmmRegister(reg_id));
  return GetFloatingPointSpillSlotSize();
}

size_t CodeGeneratorX86::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(ESP, stack_index));
  return GetFloatingPointSpillSlotSize();
}

void CodeGeneratorX86::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                     HInstruction* instruction,
                                     uint32_t dex_pc,
                                     SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}

void CodeGeneratorX86::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                           HInstruction* instruction,
                                                           SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}

void CodeGeneratorX86::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ fs()->call(Address::Absolute(entry_point_offset));
}

CodeGeneratorX86::CodeGeneratorX86(HGraph* graph,
                                   const X86InstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options,
                                   OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfXmmRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    0,
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      constant_area_start_(-1),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_address_offset_(-1) {
  // Use a fake return address register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}

void CodeGeneratorX86::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[ESP] = true;
}

InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86Core(static_cast<int>(reg));
}

void CodeGeneratorX86::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

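  // Implicit stack overflow check: probe an address GetStackOverflowReservedBytes(kX86)
  // bytes below ESP; if the stack cannot grow that far the load faults, and the runtime
  // converts the fault into a StackOverflowError using the stack map recorded below.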
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001045 if (!skip_overflow_check) {
Nicolas Geoffray397f2e42014-07-23 12:57:19 +01001046 __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001047 RecordPcInfo(nullptr, 0);
Nicolas Geoffray397f2e42014-07-23 12:57:19 +01001048 }
1049
Mark Mendell5f874182015-03-04 15:42:45 -05001050 if (HasEmptyFrame()) {
1051 return;
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001052 }
Mark Mendell5f874182015-03-04 15:42:45 -05001053
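  // Spill the callee-save core registers that were actually allocated, walking kCoreCalleeSaves
  // from last to first and recording a CFI entry for each push.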
1054 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
1055 Register reg = kCoreCalleeSaves[i];
1056 if (allocated_registers_.ContainsCoreRegister(reg)) {
1057 __ pushl(reg);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001058 __ cfi().AdjustCFAOffset(kX86WordSize);
1059 __ cfi().RelOffset(DWARFReg(reg), 0);
Mark Mendell5f874182015-03-04 15:42:45 -05001060 }
1061 }
1062
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001063 int adjust = GetFrameSize() - FrameEntrySpillSize();
1064 __ subl(ESP, Immediate(adjust));
1065 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001066 // Save the current method if we need it. Note that we do not
1067 // do this in HCurrentMethod, as the instruction might have been removed
1068 // in the SSA graph.
1069 if (RequiresCurrentMethod()) {
1070 __ movl(Address(ESP, kCurrentMethodStackOffset), kMethodRegisterArgument);
1071 }
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001072}
1073
1074void CodeGeneratorX86::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001075 __ cfi().RememberState();
1076 if (!HasEmptyFrame()) {
1077 int adjust = GetFrameSize() - FrameEntrySpillSize();
1078 __ addl(ESP, Immediate(adjust));
1079 __ cfi().AdjustCFAOffset(-adjust);
Mark Mendell5f874182015-03-04 15:42:45 -05001080
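    // Restore the allocated callee-save core registers in the reverse order of the entry spills.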
David Srbeckyc34dc932015-04-12 09:27:43 +01001081 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1082 Register reg = kCoreCalleeSaves[i];
1083 if (allocated_registers_.ContainsCoreRegister(reg)) {
1084 __ popl(reg);
1085 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86WordSize));
1086 __ cfi().Restore(DWARFReg(reg));
1087 }
Mark Mendell5f874182015-03-04 15:42:45 -05001088 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001089 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001090 __ ret();
1091 __ cfi().RestoreState();
1092 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001093}
1094
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001095void CodeGeneratorX86::Bind(HBasicBlock* block) {
1096 __ Bind(GetLabelOf(block));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001097}
1098
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001099Location InvokeDexCallingConventionVisitorX86::GetReturnLocation(Primitive::Type type) const {
1100 switch (type) {
1101 case Primitive::kPrimBoolean:
1102 case Primitive::kPrimByte:
1103 case Primitive::kPrimChar:
1104 case Primitive::kPrimShort:
1105 case Primitive::kPrimInt:
1106 case Primitive::kPrimNot:
1107 return Location::RegisterLocation(EAX);
1108
1109 case Primitive::kPrimLong:
1110 return Location::RegisterPairLocation(EAX, EDX);
1111
1112 case Primitive::kPrimVoid:
1113 return Location::NoLocation();
1114
1115 case Primitive::kPrimDouble:
1116 case Primitive::kPrimFloat:
1117 return Location::FpuRegisterLocation(XMM0);
1118 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01001119
1120 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001121}
1122
1123Location InvokeDexCallingConventionVisitorX86::GetMethodLocation() const {
1124 return Location::RegisterLocation(kMethodRegisterArgument);
1125}
1126
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001127Location InvokeDexCallingConventionVisitorX86::GetNextLocation(Primitive::Type type) {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001128 switch (type) {
1129 case Primitive::kPrimBoolean:
1130 case Primitive::kPrimByte:
1131 case Primitive::kPrimChar:
1132 case Primitive::kPrimShort:
1133 case Primitive::kPrimInt:
1134 case Primitive::kPrimNot: {
1135 uint32_t index = gp_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001136 stack_index_++;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001137 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001138 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001139 } else {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001140 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001141 }
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001142 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001143
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001144 case Primitive::kPrimLong: {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001145 uint32_t index = gp_index_;
1146 gp_index_ += 2;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001147 stack_index_ += 2;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001148 if (index + 1 < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001149 X86ManagedRegister pair = X86ManagedRegister::FromRegisterPair(
1150 calling_convention.GetRegisterPairAt(index));
1151 return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001152 } else {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001153 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
1154 }
1155 }
1156
1157 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001158 uint32_t index = float_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001159 stack_index_++;
1160 if (index < calling_convention.GetNumberOfFpuRegisters()) {
1161 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
1162 } else {
1163 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
1164 }
1165 }
1166
1167 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001168 uint32_t index = float_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001169 stack_index_ += 2;
1170 if (index < calling_convention.GetNumberOfFpuRegisters()) {
1171 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
1172 } else {
1173 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001174 }
1175 }
1176
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001177 case Primitive::kPrimVoid:
1178 LOG(FATAL) << "Unexpected parameter type " << type;
1179 break;
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001180 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00001181 return Location::NoLocation();
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001182}
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001183
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001184void CodeGeneratorX86::Move32(Location destination, Location source) {
1185 if (source.Equals(destination)) {
1186 return;
1187 }
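  // Dispatch on the destination kind first, then on the kind of the source.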
1188 if (destination.IsRegister()) {
1189 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001190 __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001191 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001192 __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001193 } else {
1194 DCHECK(source.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001195 __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001196 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001197 } else if (destination.IsFpuRegister()) {
1198 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001199 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001200 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001201 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001202 } else {
1203 DCHECK(source.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001204 __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001205 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001206 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001207 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001208 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001209 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001210 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001211 __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Mark Mendell7c8d0092015-01-26 11:21:33 -05001212 } else if (source.IsConstant()) {
1213 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001214 int32_t value = GetInt32ValueOf(constant);
Mark Mendell7c8d0092015-01-26 11:21:33 -05001215 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001216 } else {
1217 DCHECK(source.IsStackSlot());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001218 __ pushl(Address(ESP, source.GetStackIndex()));
1219 __ popl(Address(ESP, destination.GetStackIndex()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001220 }
1221 }
1222}
1223
1224void CodeGeneratorX86::Move64(Location destination, Location source) {
1225 if (source.Equals(destination)) {
1226 return;
1227 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001228 if (destination.IsRegisterPair()) {
1229 if (source.IsRegisterPair()) {
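      // The source and destination pairs may overlap, so let the parallel move resolver
      // order (or swap) the two 32-bit moves.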
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001230 EmitParallelMoves(
1231 Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
1232 Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001233 Primitive::kPrimInt,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001234 Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001235 Location::RegisterLocation(destination.AsRegisterPairLow<Register>()),
1236 Primitive::kPrimInt);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001237 } else if (source.IsFpuRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001238 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
1239 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
1240 __ psrlq(src_reg, Immediate(32));
1241 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001242 } else {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001243 // No conflict possible, so just do the moves.
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001244 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001245 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
1246 __ movl(destination.AsRegisterPairHigh<Register>(),
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001247 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
1248 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001249 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05001250 if (source.IsFpuRegister()) {
1251 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
1252 } else if (source.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001253 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001254 } else if (source.IsRegisterPair()) {
1255 size_t elem_size = Primitive::ComponentSize(Primitive::kPrimInt);
1256 // Create stack space for 2 elements.
1257 __ subl(ESP, Immediate(2 * elem_size));
1258 __ movl(Address(ESP, 0), source.AsRegisterPairLow<Register>());
1259 __ movl(Address(ESP, elem_size), source.AsRegisterPairHigh<Register>());
1260 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
1261 // And remove the temporary stack space we allocated.
1262 __ addl(ESP, Immediate(2 * elem_size));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001263 } else {
1264 LOG(FATAL) << "Unimplemented";
1265 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001266 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001267 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001268 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001269 // No conflict possible, so just do the moves.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001270 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001271 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001272 source.AsRegisterPairHigh<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001273 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001274 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001275 } else if (source.IsConstant()) {
1276 HConstant* constant = source.GetConstant();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001277 DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
1278 int64_t value = GetInt64ValueOf(constant);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001279 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(Low32Bits(value)));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001280 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
1281 Immediate(High32Bits(value)));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001282 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001283 DCHECK(source.IsDoubleStackSlot()) << source;
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001284 EmitParallelMoves(
1285 Location::StackSlot(source.GetStackIndex()),
1286 Location::StackSlot(destination.GetStackIndex()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001287 Primitive::kPrimInt,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001288 Location::StackSlot(source.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001289 Location::StackSlot(destination.GetHighStackIndex(kX86WordSize)),
1290 Primitive::kPrimInt);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001291 }
1292 }
1293}
1294
Calin Juravle175dc732015-08-25 15:42:32 +01001295void CodeGeneratorX86::MoveConstant(Location location, int32_t value) {
1296 DCHECK(location.IsRegister());
1297 __ movl(location.AsRegister<Register>(), Immediate(value));
1298}
1299
Calin Juravlee460d1d2015-09-29 04:52:17 +01001300void CodeGeneratorX86::MoveLocation(Location dst, Location src, Primitive::Type dst_type) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001301 HParallelMove move(GetGraph()->GetArena());
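  // Long values that are neither constants nor in an XMM register are split into two 32-bit
  // moves so the resolver can place each half independently.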
1302 if (dst_type == Primitive::kPrimLong && !src.IsConstant() && !src.IsFpuRegister()) {
1303 move.AddMove(src.ToLow(), dst.ToLow(), Primitive::kPrimInt, nullptr);
1304 move.AddMove(src.ToHigh(), dst.ToHigh(), Primitive::kPrimInt, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001305 } else {
David Brazdil74eb1b22015-12-14 11:44:01 +00001306 move.AddMove(src, dst, dst_type, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001307 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001308 GetMoveResolver()->EmitNativeCode(&move);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001309}
1310
1311void CodeGeneratorX86::AddLocationAsTemp(Location location, LocationSummary* locations) {
1312 if (location.IsRegister()) {
1313 locations->AddTemp(location);
1314 } else if (location.IsRegisterPair()) {
1315 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1316 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
1317 } else {
1318 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1319 }
1320}
1321
David Brazdilfc6a86a2015-06-26 10:33:45 +00001322void InstructionCodeGeneratorX86::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001323 DCHECK(!successor->IsExitBlock());
1324
1325 HBasicBlock* block = got->GetBlock();
1326 HInstruction* previous = got->GetPrevious();
1327
1328 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001329 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001330 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1331 return;
1332 }
1333
1334 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1335 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1336 }
1337 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00001338 __ jmp(codegen_->GetLabelOf(successor));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001339 }
1340}
1341
David Brazdilfc6a86a2015-06-26 10:33:45 +00001342void LocationsBuilderX86::VisitGoto(HGoto* got) {
1343 got->SetLocations(nullptr);
1344}
1345
1346void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
1347 HandleGoto(got, got->GetSuccessor());
1348}
1349
1350void LocationsBuilderX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1351 try_boundary->SetLocations(nullptr);
1352}
1353
1354void InstructionCodeGeneratorX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1355 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1356 if (!successor->IsExitBlock()) {
1357 HandleGoto(try_boundary, successor);
1358 }
1359}
1360
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001361void LocationsBuilderX86::VisitExit(HExit* exit) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00001362 exit->SetLocations(nullptr);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001363}
1364
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001365void InstructionCodeGeneratorX86::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001366}
1367
Mark Mendell152408f2015-12-31 12:28:50 -05001368template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001369void InstructionCodeGeneratorX86::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001370 LabelType* true_label,
1371 LabelType* false_label) {
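  // The preceding FP compare leaves the unordered outcome (a NaN operand) as a separate case;
  // route it to the target dictated by the condition before testing the ordered result.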
Roland Levillain4fa13f62015-07-06 18:11:54 +01001372 if (cond->IsFPConditionTrueIfNaN()) {
1373 __ j(kUnordered, true_label);
1374 } else if (cond->IsFPConditionFalseIfNaN()) {
1375 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001376 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001377 __ j(X86UnsignedOrFPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001378}
1379
Mark Mendell152408f2015-12-31 12:28:50 -05001380template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001381void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001382 LabelType* true_label,
1383 LabelType* false_label) {
Mark Mendellc4701932015-04-10 13:18:51 -04001384 LocationSummary* locations = cond->GetLocations();
1385 Location left = locations->InAt(0);
1386 Location right = locations->InAt(1);
1387 IfCondition if_cond = cond->GetCondition();
1388
Mark Mendellc4701932015-04-10 13:18:51 -04001389 Register left_high = left.AsRegisterPairHigh<Register>();
Roland Levillain4fa13f62015-07-06 18:11:54 +01001390 Register left_low = left.AsRegisterPairLow<Register>();
Mark Mendellc4701932015-04-10 13:18:51 -04001391 IfCondition true_high_cond = if_cond;
1392 IfCondition false_high_cond = cond->GetOppositeCondition();
Aart Bike9f37602015-10-09 11:15:55 -07001393  Condition final_condition = X86UnsignedOrFPCondition(if_cond);  // The final low-word comparison is unsigned.
Mark Mendellc4701932015-04-10 13:18:51 -04001394
1395 // Set the conditions for the test, remembering that == needs to be
1396 // decided using the low words.
1397 switch (if_cond) {
1398 case kCondEQ:
Mark Mendellc4701932015-04-10 13:18:51 -04001399 case kCondNE:
Roland Levillain4fa13f62015-07-06 18:11:54 +01001400 // Nothing to do.
Mark Mendellc4701932015-04-10 13:18:51 -04001401 break;
1402 case kCondLT:
1403 false_high_cond = kCondGT;
Mark Mendellc4701932015-04-10 13:18:51 -04001404 break;
1405 case kCondLE:
1406 true_high_cond = kCondLT;
Mark Mendellc4701932015-04-10 13:18:51 -04001407 break;
1408 case kCondGT:
1409 false_high_cond = kCondLT;
Mark Mendellc4701932015-04-10 13:18:51 -04001410 break;
1411 case kCondGE:
1412 true_high_cond = kCondGT;
Mark Mendellc4701932015-04-10 13:18:51 -04001413 break;
Aart Bike9f37602015-10-09 11:15:55 -07001414 case kCondB:
1415 false_high_cond = kCondA;
1416 break;
1417 case kCondBE:
1418 true_high_cond = kCondB;
1419 break;
1420 case kCondA:
1421 false_high_cond = kCondB;
1422 break;
1423 case kCondAE:
1424 true_high_cond = kCondA;
1425 break;
Mark Mendellc4701932015-04-10 13:18:51 -04001426 }
1427
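  // The right-hand side may be a constant, a register pair, or a double stack slot. In each
  // case the high words are compared first; only if they are equal does the low-word
  // comparison decide the result.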
1428 if (right.IsConstant()) {
1429 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellc4701932015-04-10 13:18:51 -04001430 int32_t val_high = High32Bits(value);
Roland Levillain4fa13f62015-07-06 18:11:54 +01001431 int32_t val_low = Low32Bits(value);
Mark Mendellc4701932015-04-10 13:18:51 -04001432
Aart Bika19616e2016-02-01 18:57:58 -08001433 codegen_->Compare32BitValue(left_high, val_high);
Mark Mendellc4701932015-04-10 13:18:51 -04001434 if (if_cond == kCondNE) {
Aart Bike9f37602015-10-09 11:15:55 -07001435 __ j(X86Condition(true_high_cond), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001436 } else if (if_cond == kCondEQ) {
Aart Bike9f37602015-10-09 11:15:55 -07001437 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001438 } else {
Aart Bike9f37602015-10-09 11:15:55 -07001439 __ j(X86Condition(true_high_cond), true_label);
1440 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001441 }
1442    // The high words are equal, so compare the low words.
Aart Bika19616e2016-02-01 18:57:58 -08001443 codegen_->Compare32BitValue(left_low, val_low);
Mark Mendell8659e842016-02-16 10:41:46 -05001444 } else if (right.IsRegisterPair()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001445 Register right_high = right.AsRegisterPairHigh<Register>();
Roland Levillain4fa13f62015-07-06 18:11:54 +01001446 Register right_low = right.AsRegisterPairLow<Register>();
Mark Mendellc4701932015-04-10 13:18:51 -04001447
1448 __ cmpl(left_high, right_high);
1449 if (if_cond == kCondNE) {
Aart Bike9f37602015-10-09 11:15:55 -07001450 __ j(X86Condition(true_high_cond), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001451 } else if (if_cond == kCondEQ) {
Aart Bike9f37602015-10-09 11:15:55 -07001452 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001453 } else {
Aart Bike9f37602015-10-09 11:15:55 -07001454 __ j(X86Condition(true_high_cond), true_label);
1455 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001456 }
1457    // The high words are equal, so compare the low words.
1458 __ cmpl(left_low, right_low);
Mark Mendell8659e842016-02-16 10:41:46 -05001459 } else {
1460 DCHECK(right.IsDoubleStackSlot());
1461 __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
1462 if (if_cond == kCondNE) {
1463 __ j(X86Condition(true_high_cond), true_label);
1464 } else if (if_cond == kCondEQ) {
1465 __ j(X86Condition(false_high_cond), false_label);
1466 } else {
1467 __ j(X86Condition(true_high_cond), true_label);
1468 __ j(X86Condition(false_high_cond), false_label);
1469 }
1470    // The high words are equal, so compare the low words.
1471 __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
Mark Mendellc4701932015-04-10 13:18:51 -04001472 }
1473 // The last comparison might be unsigned.
1474 __ j(final_condition, true_label);
1475}
1476
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001477void InstructionCodeGeneratorX86::GenerateFPCompare(Location lhs,
1478 Location rhs,
1479 HInstruction* insn,
1480 bool is_double) {
1481 HX86LoadFromConstantTable* const_area = insn->InputAt(1)->AsX86LoadFromConstantTable();
1482 if (is_double) {
1483 if (rhs.IsFpuRegister()) {
1484 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
1485 } else if (const_area != nullptr) {
1486 DCHECK(const_area->IsEmittedAtUseSite());
1487 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(),
1488 codegen_->LiteralDoubleAddress(
1489 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
1490 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
1491 } else {
1492 DCHECK(rhs.IsDoubleStackSlot());
1493 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
1494 }
1495 } else {
1496 if (rhs.IsFpuRegister()) {
1497 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
1498 } else if (const_area != nullptr) {
1499 DCHECK(const_area->IsEmittedAtUseSite());
1500 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(),
1501 codegen_->LiteralFloatAddress(
1502 const_area->GetConstant()->AsFloatConstant()->GetValue(),
1503 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
1504 } else {
1505 DCHECK(rhs.IsStackSlot());
1506 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
1507 }
1508 }
1509}
1510
Mark Mendell152408f2015-12-31 12:28:50 -05001511template<class LabelType>
David Brazdil0debae72015-11-12 18:37:00 +00001512void InstructionCodeGeneratorX86::GenerateCompareTestAndBranch(HCondition* condition,
Mark Mendell152408f2015-12-31 12:28:50 -05001513 LabelType* true_target_in,
1514 LabelType* false_target_in) {
David Brazdil0debae72015-11-12 18:37:00 +00001515 // Generated branching requires both targets to be explicit. If either of the
1516  // targets is nullptr (fallthrough), use and bind `fallthrough_target` instead.
Mark Mendell152408f2015-12-31 12:28:50 -05001517 LabelType fallthrough_target;
1518 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1519 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
David Brazdil0debae72015-11-12 18:37:00 +00001520
Mark Mendellc4701932015-04-10 13:18:51 -04001521 LocationSummary* locations = condition->GetLocations();
1522 Location left = locations->InAt(0);
1523 Location right = locations->InAt(1);
1524
Mark Mendellc4701932015-04-10 13:18:51 -04001525 Primitive::Type type = condition->InputAt(0)->GetType();
1526 switch (type) {
1527 case Primitive::kPrimLong:
1528 GenerateLongComparesAndJumps(condition, true_target, false_target);
1529 break;
1530 case Primitive::kPrimFloat:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001531 GenerateFPCompare(left, right, condition, false);
Mark Mendellc4701932015-04-10 13:18:51 -04001532 GenerateFPJumps(condition, true_target, false_target);
1533 break;
1534 case Primitive::kPrimDouble:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001535 GenerateFPCompare(left, right, condition, true);
Mark Mendellc4701932015-04-10 13:18:51 -04001536 GenerateFPJumps(condition, true_target, false_target);
1537 break;
1538 default:
1539 LOG(FATAL) << "Unexpected compare type " << type;
1540 }
1541
David Brazdil0debae72015-11-12 18:37:00 +00001542 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001543 __ jmp(false_target);
1544 }
David Brazdil0debae72015-11-12 18:37:00 +00001545
1546 if (fallthrough_target.IsLinked()) {
1547 __ Bind(&fallthrough_target);
1548 }
Mark Mendellc4701932015-04-10 13:18:51 -04001549}
1550
David Brazdil0debae72015-11-12 18:37:00 +00001551static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1552 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1553 // are set only strictly before `branch`. We can't use the eflags on long/FP
1554 // conditions if they are materialized due to the complex branching.
1555 return cond->IsCondition() &&
1556 cond->GetNext() == branch &&
1557 cond->InputAt(0)->GetType() != Primitive::kPrimLong &&
1558 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1559}
1560
Mark Mendell152408f2015-12-31 12:28:50 -05001561template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001562void InstructionCodeGeneratorX86::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001563 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001564 LabelType* true_target,
1565 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001566 HInstruction* cond = instruction->InputAt(condition_input_index);
1567
1568 if (true_target == nullptr && false_target == nullptr) {
1569 // Nothing to do. The code always falls through.
1570 return;
1571 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001572 // Constant condition, statically compared against "true" (integer value 1).
1573 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001574 if (true_target != nullptr) {
1575 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001576 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001577 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001578 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001579 if (false_target != nullptr) {
1580 __ jmp(false_target);
1581 }
1582 }
1583 return;
1584 }
1585
1586 // The following code generates these patterns:
1587 // (1) true_target == nullptr && false_target != nullptr
1588 // - opposite condition true => branch to false_target
1589 // (2) true_target != nullptr && false_target == nullptr
1590 // - condition true => branch to true_target
1591 // (3) true_target != nullptr && false_target != nullptr
1592 // - condition true => branch to true_target
1593 // - branch to false_target
1594 if (IsBooleanValueOrMaterializedCondition(cond)) {
1595 if (AreEflagsSetFrom(cond, instruction)) {
1596 if (true_target == nullptr) {
1597 __ j(X86Condition(cond->AsCondition()->GetOppositeCondition()), false_target);
1598 } else {
1599 __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
1600 }
1601 } else {
1602 // Materialized condition, compare against 0.
1603 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1604 if (lhs.IsRegister()) {
1605 __ testl(lhs.AsRegister<Register>(), lhs.AsRegister<Register>());
1606 } else {
1607 __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
1608 }
1609 if (true_target == nullptr) {
1610 __ j(kEqual, false_target);
1611 } else {
1612 __ j(kNotEqual, true_target);
1613 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001614 }
1615 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001616    // Condition has not been materialized; use its inputs as the comparison and
1617 // its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001618 HCondition* condition = cond->AsCondition();
David Brazdil0debae72015-11-12 18:37:00 +00001619
1620 // If this is a long or FP comparison that has been folded into
1621 // the HCondition, generate the comparison directly.
1622 Primitive::Type type = condition->InputAt(0)->GetType();
1623 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1624 GenerateCompareTestAndBranch(condition, true_target, false_target);
1625 return;
1626 }
1627
1628 Location lhs = condition->GetLocations()->InAt(0);
1629 Location rhs = condition->GetLocations()->InAt(1);
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001630 // LHS is guaranteed to be in a register (see LocationsBuilderX86::HandleCondition).
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001631 codegen_->GenerateIntCompare(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00001632 if (true_target == nullptr) {
1633 __ j(X86Condition(condition->GetOppositeCondition()), false_target);
1634 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001635 __ j(X86Condition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001636 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001637 }
David Brazdil0debae72015-11-12 18:37:00 +00001638
1639 // If neither branch falls through (case 3), the conditional branch to `true_target`
1640 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1641 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001642 __ jmp(false_target);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001643 }
1644}
1645
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001646void LocationsBuilderX86::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001647 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1648 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001649 locations->SetInAt(0, Location::Any());
1650 }
1651}
1652
1653void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001654 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1655 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1656 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1657 nullptr : codegen_->GetLabelOf(true_successor);
1658 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1659 nullptr : codegen_->GetLabelOf(false_successor);
1660 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001661}
1662
1663void LocationsBuilderX86::VisitDeoptimize(HDeoptimize* deoptimize) {
1664 LocationSummary* locations = new (GetGraph()->GetArena())
1665 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01001666 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00001667 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001668 locations->SetInAt(0, Location::Any());
1669 }
1670}
1671
1672void InstructionCodeGeneratorX86::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001673 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001674 GenerateTestAndBranch<Label>(deoptimize,
1675 /* condition_input_index */ 0,
1676 slow_path->GetEntryLabel(),
1677 /* false_target */ nullptr);
1678}
1679
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001680static bool SelectCanUseCMOV(HSelect* select) {
1681 // There are no conditional move instructions for XMMs.
1682 if (Primitive::IsFloatingPointType(select->GetType())) {
1683 return false;
1684 }
1685
1686  // An FP condition doesn't generate the single condition code (CC) that we need.
1687  // In 32-bit mode, a long condition doesn't generate a single CC either.
1688 HInstruction* condition = select->GetCondition();
1689 if (condition->IsCondition()) {
1690 Primitive::Type compare_type = condition->InputAt(0)->GetType();
1691 if (compare_type == Primitive::kPrimLong ||
1692 Primitive::IsFloatingPointType(compare_type)) {
1693 return false;
1694 }
1695 }
1696
1697 // We can generate a CMOV for this Select.
1698 return true;
1699}
1700
David Brazdil74eb1b22015-12-14 11:44:01 +00001701void LocationsBuilderX86::VisitSelect(HSelect* select) {
1702 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001703 if (Primitive::IsFloatingPointType(select->GetType())) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001704 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001705 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001706 } else {
1707 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001708 if (SelectCanUseCMOV(select)) {
1709 if (select->InputAt(1)->IsConstant()) {
1710 // Cmov can't handle a constant value.
1711 locations->SetInAt(1, Location::RequiresRegister());
1712 } else {
1713 locations->SetInAt(1, Location::Any());
1714 }
1715 } else {
1716 locations->SetInAt(1, Location::Any());
1717 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001718 }
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001719 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1720 locations->SetInAt(2, Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00001721 }
1722 locations->SetOut(Location::SameAsFirstInput());
1723}
1724
1725void InstructionCodeGeneratorX86::VisitSelect(HSelect* select) {
1726 LocationSummary* locations = select->GetLocations();
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001727 DCHECK(locations->InAt(0).Equals(locations->Out()));
1728 if (SelectCanUseCMOV(select)) {
1729 // If both the condition and the source types are integer, we can generate
1730 // a CMOV to implement Select.
1731
1732 HInstruction* select_condition = select->GetCondition();
1733 Condition cond = kNotEqual;
1734
1735 // Figure out how to test the 'condition'.
1736 if (select_condition->IsCondition()) {
1737 HCondition* condition = select_condition->AsCondition();
1738 if (!condition->IsEmittedAtUseSite()) {
1739 // This was a previously materialized condition.
1740 // Can we use the existing condition code?
1741 if (AreEflagsSetFrom(condition, select)) {
1742 // Materialization was the previous instruction. Condition codes are right.
1743 cond = X86Condition(condition->GetCondition());
1744 } else {
1745 // No, we have to recreate the condition code.
1746 Register cond_reg = locations->InAt(2).AsRegister<Register>();
1747 __ testl(cond_reg, cond_reg);
1748 }
1749 } else {
1750 // We can't handle FP or long here.
1751 DCHECK_NE(condition->InputAt(0)->GetType(), Primitive::kPrimLong);
1752 DCHECK(!Primitive::IsFloatingPointType(condition->InputAt(0)->GetType()));
1753 LocationSummary* cond_locations = condition->GetLocations();
Roland Levillain0b671c02016-08-19 12:02:34 +01001754 codegen_->GenerateIntCompare(cond_locations->InAt(0), cond_locations->InAt(1));
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001755 cond = X86Condition(condition->GetCondition());
1756 }
1757 } else {
1758 // Must be a boolean condition, which needs to be compared to 0.
1759 Register cond_reg = locations->InAt(2).AsRegister<Register>();
1760 __ testl(cond_reg, cond_reg);
1761 }
1762
1763 // If the condition is true, overwrite the output, which already contains false.
1764 Location false_loc = locations->InAt(0);
1765 Location true_loc = locations->InAt(1);
1766 if (select->GetType() == Primitive::kPrimLong) {
1767 // 64 bit conditional move.
1768 Register false_high = false_loc.AsRegisterPairHigh<Register>();
1769 Register false_low = false_loc.AsRegisterPairLow<Register>();
1770 if (true_loc.IsRegisterPair()) {
1771 __ cmovl(cond, false_high, true_loc.AsRegisterPairHigh<Register>());
1772 __ cmovl(cond, false_low, true_loc.AsRegisterPairLow<Register>());
1773 } else {
1774 __ cmovl(cond, false_high, Address(ESP, true_loc.GetHighStackIndex(kX86WordSize)));
1775 __ cmovl(cond, false_low, Address(ESP, true_loc.GetStackIndex()));
1776 }
1777 } else {
1778 // 32 bit conditional move.
1779 Register false_reg = false_loc.AsRegister<Register>();
1780 if (true_loc.IsRegister()) {
1781 __ cmovl(cond, false_reg, true_loc.AsRegister<Register>());
1782 } else {
1783 __ cmovl(cond, false_reg, Address(ESP, true_loc.GetStackIndex()));
1784 }
1785 }
1786 } else {
1787 NearLabel false_target;
1788 GenerateTestAndBranch<NearLabel>(
1789 select, /* condition_input_index */ 2, /* true_target */ nullptr, &false_target);
1790 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1791 __ Bind(&false_target);
1792 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001793}
1794
David Srbecky0cf44932015-12-09 14:09:59 +00001795void LocationsBuilderX86::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1796 new (GetGraph()->GetArena()) LocationSummary(info);
1797}
1798
David Srbeckyd28f4a02016-03-14 17:14:24 +00001799void InstructionCodeGeneratorX86::VisitNativeDebugInfo(HNativeDebugInfo*) {
1800 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001801}
1802
1803void CodeGeneratorX86::GenerateNop() {
1804 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001805}
1806
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001807void LocationsBuilderX86::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001808 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001809 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001810 // Handle the long/FP comparisons made in instruction simplification.
1811 switch (cond->InputAt(0)->GetType()) {
1812 case Primitive::kPrimLong: {
1813 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell8659e842016-02-16 10:41:46 -05001814 locations->SetInAt(1, Location::Any());
David Brazdilb3e773e2016-01-26 11:28:37 +00001815 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001816 locations->SetOut(Location::RequiresRegister());
1817 }
1818 break;
1819 }
1820 case Primitive::kPrimFloat:
1821 case Primitive::kPrimDouble: {
1822 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001823 if (cond->InputAt(1)->IsX86LoadFromConstantTable()) {
1824 DCHECK(cond->InputAt(1)->IsEmittedAtUseSite());
1825 } else if (cond->InputAt(1)->IsConstant()) {
1826 locations->SetInAt(1, Location::RequiresFpuRegister());
1827 } else {
1828 locations->SetInAt(1, Location::Any());
1829 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001830 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001831 locations->SetOut(Location::RequiresRegister());
1832 }
1833 break;
1834 }
1835 default:
1836 locations->SetInAt(0, Location::RequiresRegister());
1837 locations->SetInAt(1, Location::Any());
David Brazdilb3e773e2016-01-26 11:28:37 +00001838 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001839 // We need a byte register.
1840 locations->SetOut(Location::RegisterLocation(ECX));
1841 }
1842 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001843 }
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00001844}
1845
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001846void InstructionCodeGeneratorX86::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001847 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001848 return;
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001849 }
Mark Mendellc4701932015-04-10 13:18:51 -04001850
1851 LocationSummary* locations = cond->GetLocations();
1852 Location lhs = locations->InAt(0);
1853 Location rhs = locations->InAt(1);
1854 Register reg = locations->Out().AsRegister<Register>();
Mark Mendell152408f2015-12-31 12:28:50 -05001855 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001856
1857 switch (cond->InputAt(0)->GetType()) {
1858 default: {
1859 // Integer case.
1860
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01001861 // Clear output register: setb only sets the low byte.
Mark Mendellc4701932015-04-10 13:18:51 -04001862 __ xorl(reg, reg);
Roland Levillain0b671c02016-08-19 12:02:34 +01001863 codegen_->GenerateIntCompare(lhs, rhs);
Aart Bike9f37602015-10-09 11:15:55 -07001864 __ setb(X86Condition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001865 return;
1866 }
1867 case Primitive::kPrimLong:
1868 GenerateLongComparesAndJumps(cond, &true_label, &false_label);
1869 break;
1870 case Primitive::kPrimFloat:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001871 GenerateFPCompare(lhs, rhs, cond, false);
Mark Mendellc4701932015-04-10 13:18:51 -04001872 GenerateFPJumps(cond, &true_label, &false_label);
1873 break;
1874 case Primitive::kPrimDouble:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001875 GenerateFPCompare(lhs, rhs, cond, true);
Mark Mendellc4701932015-04-10 13:18:51 -04001876 GenerateFPJumps(cond, &true_label, &false_label);
1877 break;
1878 }
1879
1880 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001881 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001882
Roland Levillain4fa13f62015-07-06 18:11:54 +01001883 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001884 __ Bind(&false_label);
1885 __ xorl(reg, reg);
1886 __ jmp(&done_label);
1887
Roland Levillain4fa13f62015-07-06 18:11:54 +01001888 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001889 __ Bind(&true_label);
1890 __ movl(reg, Immediate(1));
1891 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001892}
1893
1894void LocationsBuilderX86::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001895 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001896}
1897
1898void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001899 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001900}
1901
1902void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001903 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001904}
1905
1906void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001907 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001908}
1909
1910void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001911 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001912}
1913
1914void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001915 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001916}
1917
1918void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001919 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001920}
1921
1922void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001923 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001924}
1925
1926void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001927 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001928}
1929
1930void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001931 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001932}
1933
1934void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001935 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001936}
1937
1938void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001939 HandleCondition(comp);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001940}
1941
Aart Bike9f37602015-10-09 11:15:55 -07001942void LocationsBuilderX86::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001943 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001944}
1945
1946void InstructionCodeGeneratorX86::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001947 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001948}
1949
1950void LocationsBuilderX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001951 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001952}
1953
1954void InstructionCodeGeneratorX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001955 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001956}
1957
1958void LocationsBuilderX86::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001959 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001960}
1961
1962void InstructionCodeGeneratorX86::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001963 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001964}
1965
1966void LocationsBuilderX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001967 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001968}
1969
1970void InstructionCodeGeneratorX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001971 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001972}
1973
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001974void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001975 LocationSummary* locations =
1976 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001977 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00001978}
1979
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001980void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001981 // Will be generated at use site.
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001982}
1983
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001984void LocationsBuilderX86::VisitNullConstant(HNullConstant* constant) {
1985 LocationSummary* locations =
1986 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1987 locations->SetOut(Location::ConstantLocation(constant));
1988}
1989
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001990void InstructionCodeGeneratorX86::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001991 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001992}
1993
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001994void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001995 LocationSummary* locations =
1996 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001997 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001998}
1999
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002000void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002001 // Will be generated at use site.
2002}
2003
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002004void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
2005 LocationSummary* locations =
2006 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2007 locations->SetOut(Location::ConstantLocation(constant));
2008}
2009
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002010void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002011 // Will be generated at use site.
2012}
2013
2014void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
2015 LocationSummary* locations =
2016 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2017 locations->SetOut(Location::ConstantLocation(constant));
2018}
2019
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002020void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002021 // Will be generated at use site.
2022}
2023
Calin Juravle27df7582015-04-17 19:12:31 +01002024void LocationsBuilderX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2025 memory_barrier->SetLocations(nullptr);
2026}
2027
2028void InstructionCodeGeneratorX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002029 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002030}
2031
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002032void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002033 ret->SetLocations(nullptr);
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00002034}
2035
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002036void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002037 codegen_->GenerateFrameExit();
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00002038}
2039
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002040void LocationsBuilderX86::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002041 LocationSummary* locations =
2042 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002043 switch (ret->InputAt(0)->GetType()) {
2044 case Primitive::kPrimBoolean:
2045 case Primitive::kPrimByte:
2046 case Primitive::kPrimChar:
2047 case Primitive::kPrimShort:
2048 case Primitive::kPrimInt:
2049 case Primitive::kPrimNot:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002050 locations->SetInAt(0, Location::RegisterLocation(EAX));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002051 break;
2052
2053 case Primitive::kPrimLong:
2054 locations->SetInAt(
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002055 0, Location::RegisterPairLocation(EAX, EDX));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002056 break;
2057
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002058 case Primitive::kPrimFloat:
2059 case Primitive::kPrimDouble:
2060 locations->SetInAt(
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002061 0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002062 break;
2063
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002064 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002065 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002066 }
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002067}
2068
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002069void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002070 if (kIsDebugBuild) {
2071 switch (ret->InputAt(0)->GetType()) {
2072 case Primitive::kPrimBoolean:
2073 case Primitive::kPrimByte:
2074 case Primitive::kPrimChar:
2075 case Primitive::kPrimShort:
2076 case Primitive::kPrimInt:
2077 case Primitive::kPrimNot:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002078 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<Register>(), EAX);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002079 break;
2080
2081 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002082 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), EAX);
2083 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), EDX);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002084 break;
2085
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002086 case Primitive::kPrimFloat:
2087 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002088 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>(), XMM0);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002089 break;
2090
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002091 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002092 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002093 }
2094 }
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002095 codegen_->GenerateFrameExit();
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002096}
2097
Calin Juravle175dc732015-08-25 15:42:32 +01002098void LocationsBuilderX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2099 // The trampoline uses the same calling convention as a normal dex call,
2100 // except that instead of loading arg0/r0 with the target Method*, arg0/r0 will
2101 // contain the method_idx.
2102 HandleInvoke(invoke);
2103}
2104
2105void InstructionCodeGeneratorX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2106 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2107}
2108
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002109void LocationsBuilderX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002110 // Explicit clinit checks triggered by static invokes must have been pruned by
2111 // art::PrepareForRegisterAllocation.
2112 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002113
Mark Mendellfb8d2792015-03-31 22:16:59 -04002114 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002115 if (intrinsic.TryDispatch(invoke)) {
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002116 if (invoke->GetLocations()->CanCall() && invoke->HasPcRelativeDexCache()) {
Vladimir Markoc53c0792015-11-19 15:48:33 +00002117 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002118 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04002119 return;
2120 }
2121
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002122 HandleInvoke(invoke);
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002123
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002124 // For PC-relative dex cache the invoke has an extra input, the PC-relative address base.
2125 if (invoke->HasPcRelativeDexCache()) {
Vladimir Markob4536b72015-11-24 13:45:23 +00002126 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002127 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002128}
2129
Mark Mendell09ed1a32015-03-25 08:30:06 -04002130static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86* codegen) {
2131 if (invoke->GetLocations()->Intrinsified()) {
2132 IntrinsicCodeGeneratorX86 intrinsic(codegen);
2133 intrinsic.Dispatch(invoke);
2134 return true;
2135 }
2136 return false;
2137}
2138
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002139void InstructionCodeGeneratorX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002140 // Explicit clinit checks triggered by static invokes must have been pruned by
2141 // art::PrepareForRegisterAllocation.
2142 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002143
Mark Mendell09ed1a32015-03-25 08:30:06 -04002144 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2145 return;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002146 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002147
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002148 LocationSummary* locations = invoke->GetLocations();
Mark Mendell09ed1a32015-03-25 08:30:06 -04002149 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002150 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Mingyao Yang8693fe12015-04-17 16:51:08 -07002151 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002152}
2153
2154void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00002155 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
2156 if (intrinsic.TryDispatch(invoke)) {
2157 return;
2158 }
2159
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002160 HandleInvoke(invoke);
2161}
2162
2163void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002164 InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002165 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002166}
2167
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002168void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002169 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2170 return;
2171 }
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002172
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002173 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002174 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002175 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002176}
2177
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002178void LocationsBuilderX86::VisitInvokeInterface(HInvokeInterface* invoke) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002179 // This call to HandleInvoke allocates a temporary (core) register
2180 // which is also used to move the hidden argument into the FP
2181 // register (XMM7) that passes it to the callee.
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002182 HandleInvoke(invoke);
2183 // Add the hidden argument.
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002184 invoke->GetLocations()->AddTemp(Location::FpuRegisterLocation(XMM7));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002185}
2186
2187void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) {
2188 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002189 LocationSummary* locations = invoke->GetLocations();
2190 Register temp = locations->GetTemp(0).AsRegister<Register>();
2191 XmmRegister hidden_reg = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002192 Location receiver = locations->InAt(0);
2193 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2194
Roland Levillain0d5a2812015-11-13 10:07:31 +00002195 // Set the hidden argument. It is safe to do this here, as XMM7
2196 // won't be modified from this point until the `call` instruction.
2197 DCHECK_EQ(XMM7, hidden_reg);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002198 __ movl(temp, Immediate(invoke->GetDexMethodIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002199 __ movd(hidden_reg, temp);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002200
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002201 if (receiver.IsStackSlot()) {
2202 __ movl(temp, Address(ESP, receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002203 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002204 __ movl(temp, Address(temp, class_offset));
2205 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002206 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002207 __ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002208 }
Roland Levillain4d027112015-07-01 15:41:14 +01002209 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002210 // Instead of simply (possibly) unpoisoning `temp` here, we should
2211 // emit a read barrier for the previous class reference load.
2212 // However this is not required in practice, as this is an
2213 // intermediate/temporary reference and because the current
2214 // concurrent copying collector keeps the from-space memory
2215 // intact/accessible until the end of the marking phase (though
2216 // it may not do so in the future).
Roland Levillain4d027112015-07-01 15:41:14 +01002217 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002218 // temp = temp->GetAddressOfIMT()
2219 __ movl(temp,
2220 Address(temp, mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002221 // temp = temp->GetImtEntryAt(method_offset);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002222 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00002223 invoke->GetImtIndex(), kX86PointerSize));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002224 __ movl(temp, Address(temp, method_offset));
2225 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002226 __ call(Address(temp,
Andreas Gampe542451c2016-07-26 09:02:02 -07002227 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002228
2229 DCHECK(!codegen_->IsLeafMethod());
2230 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2231}
2232
Roland Levillain88cb1752014-10-20 16:36:47 +01002233void LocationsBuilderX86::VisitNeg(HNeg* neg) {
2234 LocationSummary* locations =
2235 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2236 switch (neg->GetResultType()) {
2237 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002238 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002239 locations->SetInAt(0, Location::RequiresRegister());
2240 locations->SetOut(Location::SameAsFirstInput());
2241 break;
2242
Roland Levillain88cb1752014-10-20 16:36:47 +01002243 case Primitive::kPrimFloat:
Roland Levillain5368c212014-11-27 15:03:41 +00002244 locations->SetInAt(0, Location::RequiresFpuRegister());
2245 locations->SetOut(Location::SameAsFirstInput());
2246 locations->AddTemp(Location::RequiresRegister());
2247 locations->AddTemp(Location::RequiresFpuRegister());
2248 break;
2249
Roland Levillain88cb1752014-10-20 16:36:47 +01002250 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002251 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002252 locations->SetOut(Location::SameAsFirstInput());
2253 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002254 break;
2255
2256 default:
2257 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2258 }
2259}
2260
2261void InstructionCodeGeneratorX86::VisitNeg(HNeg* neg) {
2262 LocationSummary* locations = neg->GetLocations();
2263 Location out = locations->Out();
2264 Location in = locations->InAt(0);
2265 switch (neg->GetResultType()) {
2266 case Primitive::kPrimInt:
2267 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002268 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002269 __ negl(out.AsRegister<Register>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002270 break;
2271
2272 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002273 DCHECK(in.IsRegisterPair());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002274 DCHECK(in.Equals(out));
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002275 __ negl(out.AsRegisterPairLow<Register>());
2276 // Negation is similar to subtraction from zero. The low 32 bits
2277 // trigger a borrow when they are different from zero; to take
2278 // this into account, add 1 to the high 32 bits if the carry flag
2279 // (CF) is set to 1 after the first NEGL (on the low half), then
2280 // negate the high half.
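      // For illustration only (not generated code): negating 0x00000000'00000001:
      //   NEGL low       -> low  = 0xFFFFFFFF, CF = 1
      //   ADCL high, 0   -> high = 0x00000000 + 1 = 0x00000001
      //   NEGL high      -> high = 0xFFFFFFFF
      // yielding 0xFFFFFFFF'FFFFFFFF, i.e. -1, as expected.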
2281 __ adcl(out.AsRegisterPairHigh<Register>(), Immediate(0));
2282 __ negl(out.AsRegisterPairHigh<Register>());
2283 break;
2284
Roland Levillain5368c212014-11-27 15:03:41 +00002285 case Primitive::kPrimFloat: {
2286 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002287 Register constant = locations->GetTemp(0).AsRegister<Register>();
2288 XmmRegister mask = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002289 // Implement float negation with an exclusive or with value
2290 // 0x80000000 (mask for bit 31, representing the sign of a
2291 // single-precision floating-point number).
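      // Flipping only the sign bit (rather than computing 0.0f - x) gives the
      // correct result for negative zero (the negation of +0.0f is -0.0f, while
      // 0.0f - 0.0f is +0.0f) and involves no floating-point arithmetic.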
2292 __ movl(constant, Immediate(INT32_C(0x80000000)));
2293 __ movd(mask, constant);
Roland Levillain271ab9c2014-11-27 15:23:57 +00002294 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002295 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002296 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002297
Roland Levillain5368c212014-11-27 15:03:41 +00002298 case Primitive::kPrimDouble: {
2299 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002300 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002301 // Implement double negation with an exclusive or with value
2302 // 0x8000000000000000 (mask for bit 63, representing the sign of
2303 // a double-precision floating-point number).
2304 __ LoadLongConstant(mask, INT64_C(0x8000000000000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002305 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002306 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002307 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002308
2309 default:
2310 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2311 }
2312}
2313
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002314void LocationsBuilderX86::VisitX86FPNeg(HX86FPNeg* neg) {
2315 LocationSummary* locations =
2316 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2317 DCHECK(Primitive::IsFloatingPointType(neg->GetType()));
2318 locations->SetInAt(0, Location::RequiresFpuRegister());
2319 locations->SetInAt(1, Location::RequiresRegister());
2320 locations->SetOut(Location::SameAsFirstInput());
2321 locations->AddTemp(Location::RequiresFpuRegister());
2322}
2323
2324void InstructionCodeGeneratorX86::VisitX86FPNeg(HX86FPNeg* neg) {
2325 LocationSummary* locations = neg->GetLocations();
2326 Location out = locations->Out();
2327 DCHECK(locations->InAt(0).Equals(out));
2328
2329 Register constant_area = locations->InAt(1).AsRegister<Register>();
2330 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
2331 if (neg->GetType() == Primitive::kPrimFloat) {
2332 __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x80000000), constant_area));
2333 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
2334 } else {
2335 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000), constant_area));
2336 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
2337 }
2338}
2339
Roland Levillaindff1f282014-11-05 14:15:05 +00002340void LocationsBuilderX86::VisitTypeConversion(HTypeConversion* conversion) {
Roland Levillaindff1f282014-11-05 14:15:05 +00002341 Primitive::Type result_type = conversion->GetResultType();
2342 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002343 DCHECK_NE(result_type, input_type);
Roland Levillain624279f2014-12-04 11:54:28 +00002344
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002345 // The float-to-long and double-to-long type conversions rely on a
2346 // call to the runtime.
Roland Levillain624279f2014-12-04 11:54:28 +00002347 LocationSummary::CallKind call_kind =
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002348 ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
2349 && result_type == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01002350 ? LocationSummary::kCallOnMainOnly
Roland Levillain624279f2014-12-04 11:54:28 +00002351 : LocationSummary::kNoCall;
2352 LocationSummary* locations =
2353 new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
2354
David Brazdilb2bd1c52015-03-25 11:17:37 +00002355 // The Java language does not allow treating boolean as an integral type but
2356 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002357
Roland Levillaindff1f282014-11-05 14:15:05 +00002358 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002359 case Primitive::kPrimByte:
2360 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002361 case Primitive::kPrimLong: {
2362 // Type conversion from long to byte is a result of code transformations.
2363 HInstruction* input = conversion->InputAt(0);
2364 Location input_location = input->IsConstant()
2365 ? Location::ConstantLocation(input->AsConstant())
2366 : Location::RegisterPairLocation(EAX, EDX);
2367 locations->SetInAt(0, input_location);
2368 // Make the output overlap to please the register allocator. This greatly simplifies
2369 // the validation of the linear scan implementation.
2370 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2371 break;
2372 }
David Brazdil46e2a392015-03-16 17:31:52 +00002373 case Primitive::kPrimBoolean:
2374 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002375 case Primitive::kPrimShort:
2376 case Primitive::kPrimInt:
2377 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002378 // Processing a Dex `int-to-byte' instruction.
Mark Mendell5f874182015-03-04 15:42:45 -05002379 locations->SetInAt(0, Location::ByteRegisterOrConstant(ECX, conversion->InputAt(0)));
2380 // Make the output overlap to please the register allocator. This greatly simplifies
2381 // the validation of the linear scan implementation.
2382 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Roland Levillain51d3fc42014-11-13 14:11:42 +00002383 break;
2384
2385 default:
2386 LOG(FATAL) << "Unexpected type conversion from " << input_type
2387 << " to " << result_type;
2388 }
2389 break;
2390
Roland Levillain01a8d712014-11-14 16:27:39 +00002391 case Primitive::kPrimShort:
2392 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002393 case Primitive::kPrimLong:
2394 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002395 case Primitive::kPrimBoolean:
2396 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002397 case Primitive::kPrimByte:
2398 case Primitive::kPrimInt:
2399 case Primitive::kPrimChar:
2400 // Processing a Dex `int-to-short' instruction.
2401 locations->SetInAt(0, Location::Any());
2402 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2403 break;
2404
2405 default:
2406 LOG(FATAL) << "Unexpected type conversion from " << input_type
2407 << " to " << result_type;
2408 }
2409 break;
2410
Roland Levillain946e1432014-11-11 17:35:19 +00002411 case Primitive::kPrimInt:
2412 switch (input_type) {
2413 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002414 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002415 locations->SetInAt(0, Location::Any());
2416 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2417 break;
2418
2419 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002420 // Processing a Dex `float-to-int' instruction.
2421 locations->SetInAt(0, Location::RequiresFpuRegister());
2422 locations->SetOut(Location::RequiresRegister());
2423 locations->AddTemp(Location::RequiresFpuRegister());
2424 break;
2425
Roland Levillain946e1432014-11-11 17:35:19 +00002426 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002427 // Processing a Dex `double-to-int' instruction.
2428 locations->SetInAt(0, Location::RequiresFpuRegister());
2429 locations->SetOut(Location::RequiresRegister());
2430 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002431 break;
2432
2433 default:
2434 LOG(FATAL) << "Unexpected type conversion from " << input_type
2435 << " to " << result_type;
2436 }
2437 break;
2438
Roland Levillaindff1f282014-11-05 14:15:05 +00002439 case Primitive::kPrimLong:
2440 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002441 case Primitive::kPrimBoolean:
2442 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002443 case Primitive::kPrimByte:
2444 case Primitive::kPrimShort:
2445 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002446 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002447 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002448 locations->SetInAt(0, Location::RegisterLocation(EAX));
2449 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
2450 break;
2451
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002452 case Primitive::kPrimFloat:
Vladimir Marko949c91f2015-01-27 10:48:44 +00002453 case Primitive::kPrimDouble: {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002454 // Processing a Dex `float-to-long' or 'double-to-long' instruction.
Vladimir Marko949c91f2015-01-27 10:48:44 +00002455 InvokeRuntimeCallingConvention calling_convention;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002456 XmmRegister parameter = calling_convention.GetFpuRegisterAt(0);
2457 locations->SetInAt(0, Location::FpuRegisterLocation(parameter));
2458
Vladimir Marko949c91f2015-01-27 10:48:44 +00002459 // The runtime helper puts the result in EAX, EDX.
2460 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
Vladimir Marko949c91f2015-01-27 10:48:44 +00002461 }
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002462 break;
Roland Levillaindff1f282014-11-05 14:15:05 +00002463
2464 default:
2465 LOG(FATAL) << "Unexpected type conversion from " << input_type
2466 << " to " << result_type;
2467 }
2468 break;
2469
Roland Levillain981e4542014-11-14 11:47:14 +00002470 case Primitive::kPrimChar:
2471 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002472 case Primitive::kPrimLong:
2473 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002474 case Primitive::kPrimBoolean:
2475 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002476 case Primitive::kPrimByte:
2477 case Primitive::kPrimShort:
2478 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002479 // Processing a Dex `int-to-char' instruction.
2480 locations->SetInAt(0, Location::Any());
2481 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2482 break;
2483
2484 default:
2485 LOG(FATAL) << "Unexpected type conversion from " << input_type
2486 << " to " << result_type;
2487 }
2488 break;
2489
Roland Levillaindff1f282014-11-05 14:15:05 +00002490 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002491 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002492 case Primitive::kPrimBoolean:
2493 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002494 case Primitive::kPrimByte:
2495 case Primitive::kPrimShort:
2496 case Primitive::kPrimInt:
2497 case Primitive::kPrimChar:
2498 // Processing a Dex `int-to-float' instruction.
2499 locations->SetInAt(0, Location::RequiresRegister());
2500 locations->SetOut(Location::RequiresFpuRegister());
2501 break;
2502
2503 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002504 // Processing a Dex `long-to-float' instruction.
Roland Levillain232ade02015-04-20 15:14:36 +01002505 locations->SetInAt(0, Location::Any());
2506 locations->SetOut(Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002507 break;
2508
Roland Levillaincff13742014-11-17 14:32:17 +00002509 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002510 // Processing a Dex `double-to-float' instruction.
2511 locations->SetInAt(0, Location::RequiresFpuRegister());
2512 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002513 break;
2514
2515 default:
2516 LOG(FATAL) << "Unexpected type conversion from " << input_type
2517 << " to " << result_type;
2518 }
2519 break;
2520
Roland Levillaindff1f282014-11-05 14:15:05 +00002521 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002522 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002523 case Primitive::kPrimBoolean:
2524 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002525 case Primitive::kPrimByte:
2526 case Primitive::kPrimShort:
2527 case Primitive::kPrimInt:
2528 case Primitive::kPrimChar:
2529 // Processing a Dex `int-to-double' instruction.
2530 locations->SetInAt(0, Location::RequiresRegister());
2531 locations->SetOut(Location::RequiresFpuRegister());
2532 break;
2533
2534 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002535 // Processing a Dex `long-to-double' instruction.
Roland Levillain232ade02015-04-20 15:14:36 +01002536 locations->SetInAt(0, Location::Any());
2537 locations->SetOut(Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002538 break;
2539
Roland Levillaincff13742014-11-17 14:32:17 +00002540 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002541 // Processing a Dex `float-to-double' instruction.
2542 locations->SetInAt(0, Location::RequiresFpuRegister());
2543 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002544 break;
2545
2546 default:
2547 LOG(FATAL) << "Unexpected type conversion from " << input_type
2548 << " to " << result_type;
2549 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002550 break;
2551
2552 default:
2553 LOG(FATAL) << "Unexpected type conversion from " << input_type
2554 << " to " << result_type;
2555 }
2556}
2557
2558void InstructionCodeGeneratorX86::VisitTypeConversion(HTypeConversion* conversion) {
2559 LocationSummary* locations = conversion->GetLocations();
2560 Location out = locations->Out();
2561 Location in = locations->InAt(0);
2562 Primitive::Type result_type = conversion->GetResultType();
2563 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002564 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002565 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002566 case Primitive::kPrimByte:
2567 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002568 case Primitive::kPrimLong:
2569 // Type conversion from long to byte is a result of code transformations.
2570 if (in.IsRegisterPair()) {
2571 __ movsxb(out.AsRegister<Register>(), in.AsRegisterPairLow<ByteRegister>());
2572 } else {
2573 DCHECK(in.GetConstant()->IsLongConstant());
2574 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2575 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2576 }
2577 break;
David Brazdil46e2a392015-03-16 17:31:52 +00002578 case Primitive::kPrimBoolean:
2579 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002580 case Primitive::kPrimShort:
2581 case Primitive::kPrimInt:
2582 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002583 // Processing a Dex `int-to-byte' instruction.
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002584 if (in.IsRegister()) {
2585 __ movsxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002586 } else {
2587 DCHECK(in.GetConstant()->IsIntConstant());
2588 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
2589 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2590 }
Roland Levillain51d3fc42014-11-13 14:11:42 +00002591 break;
2592
2593 default:
2594 LOG(FATAL) << "Unexpected type conversion from " << input_type
2595 << " to " << result_type;
2596 }
2597 break;
2598
Roland Levillain01a8d712014-11-14 16:27:39 +00002599 case Primitive::kPrimShort:
2600 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002601 case Primitive::kPrimLong:
2602 // Type conversion from long to short is a result of code transformations.
2603 if (in.IsRegisterPair()) {
2604 __ movsxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2605 } else if (in.IsDoubleStackSlot()) {
2606 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2607 } else {
2608 DCHECK(in.GetConstant()->IsLongConstant());
2609 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2610 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
2611 }
2612 break;
David Brazdil46e2a392015-03-16 17:31:52 +00002613 case Primitive::kPrimBoolean:
2614 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002615 case Primitive::kPrimByte:
2616 case Primitive::kPrimInt:
2617 case Primitive::kPrimChar:
2618 // Processing a Dex `int-to-short' instruction.
2619 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002620 __ movsxw(out.AsRegister<Register>(), in.AsRegister<Register>());
Roland Levillain01a8d712014-11-14 16:27:39 +00002621 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002622 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain01a8d712014-11-14 16:27:39 +00002623 } else {
2624 DCHECK(in.GetConstant()->IsIntConstant());
2625 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002626 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
Roland Levillain01a8d712014-11-14 16:27:39 +00002627 }
2628 break;
2629
2630 default:
2631 LOG(FATAL) << "Unexpected type conversion from " << input_type
2632 << " to " << result_type;
2633 }
2634 break;
2635
Roland Levillain946e1432014-11-11 17:35:19 +00002636 case Primitive::kPrimInt:
2637 switch (input_type) {
2638 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002639 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002640 if (in.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002641 __ movl(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
Roland Levillain946e1432014-11-11 17:35:19 +00002642 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002643 __ movl(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain946e1432014-11-11 17:35:19 +00002644 } else {
2645 DCHECK(in.IsConstant());
2646 DCHECK(in.GetConstant()->IsLongConstant());
2647 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002648 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002649 }
2650 break;
2651
Roland Levillain3f8f9362014-12-02 17:45:01 +00002652 case Primitive::kPrimFloat: {
2653 // Processing a Dex `float-to-int' instruction.
2654 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2655 Register output = out.AsRegister<Register>();
2656 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002657 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002658
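        // Summary of the Java semantics implemented below: inputs >= 2^31 keep
        // kPrimIntMax, NaN yields 0, and for the remaining out-of-range (very
        // negative) inputs cvttss2si returns 0x80000000, which is kPrimIntMin.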
2659 __ movl(output, Immediate(kPrimIntMax));
2660 // temp = int-to-float(output)
2661 __ cvtsi2ss(temp, output);
2662 // if input >= temp goto done
2663 __ comiss(input, temp);
2664 __ j(kAboveEqual, &done);
2665 // if input == NaN goto nan
2666 __ j(kUnordered, &nan);
2667 // output = float-to-int-truncate(input)
2668 __ cvttss2si(output, input);
2669 __ jmp(&done);
2670 __ Bind(&nan);
2671 // output = 0
2672 __ xorl(output, output);
2673 __ Bind(&done);
2674 break;
2675 }
2676
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002677 case Primitive::kPrimDouble: {
2678 // Processing a Dex `double-to-int' instruction.
2679 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2680 Register output = out.AsRegister<Register>();
2681 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002682 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002683
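        // Same clamping scheme as the float-to-int conversion above, using the
        // double-precision instructions (cvtsi2sd, comisd, cvttsd2si).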
2684 __ movl(output, Immediate(kPrimIntMax));
2685 // temp = int-to-double(output)
2686 __ cvtsi2sd(temp, output);
2687 // if input >= temp goto done
2688 __ comisd(input, temp);
2689 __ j(kAboveEqual, &done);
2690 // if input == NaN goto nan
2691 __ j(kUnordered, &nan);
2692 // output = double-to-int-truncate(input)
2693 __ cvttsd2si(output, input);
2694 __ jmp(&done);
2695 __ Bind(&nan);
2696 // output = 0
2697 __ xorl(output, output);
2698 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002699 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002700 }
Roland Levillain946e1432014-11-11 17:35:19 +00002701
2702 default:
2703 LOG(FATAL) << "Unexpected type conversion from " << input_type
2704 << " to " << result_type;
2705 }
2706 break;
2707
Roland Levillaindff1f282014-11-05 14:15:05 +00002708 case Primitive::kPrimLong:
2709 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002710 case Primitive::kPrimBoolean:
2711 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002712 case Primitive::kPrimByte:
2713 case Primitive::kPrimShort:
2714 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002715 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002716 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002717 DCHECK_EQ(out.AsRegisterPairLow<Register>(), EAX);
2718 DCHECK_EQ(out.AsRegisterPairHigh<Register>(), EDX);
Roland Levillain271ab9c2014-11-27 15:23:57 +00002719 DCHECK_EQ(in.AsRegister<Register>(), EAX);
Roland Levillaindff1f282014-11-05 14:15:05 +00002720 __ cdq();
2721 break;
2722
2723 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002724 // Processing a Dex `float-to-long' instruction.
Serban Constantinescuba45db02016-07-12 22:53:02 +01002725 codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00002726 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
Roland Levillain624279f2014-12-04 11:54:28 +00002727 break;
2728
Roland Levillaindff1f282014-11-05 14:15:05 +00002729 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002730 // Processing a Dex `double-to-long' instruction.
Serban Constantinescuba45db02016-07-12 22:53:02 +01002731 codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00002732 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
Roland Levillaindff1f282014-11-05 14:15:05 +00002733 break;
2734
2735 default:
2736 LOG(FATAL) << "Unexpected type conversion from " << input_type
2737 << " to " << result_type;
2738 }
2739 break;
2740
Roland Levillain981e4542014-11-14 11:47:14 +00002741 case Primitive::kPrimChar:
2742 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002743 case Primitive::kPrimLong:
2744 // Type conversion from long to char is a result of code transformations.
2745 if (in.IsRegisterPair()) {
2746 __ movzxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2747 } else if (in.IsDoubleStackSlot()) {
2748 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2749 } else {
2750 DCHECK(in.GetConstant()->IsLongConstant());
2751 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2752 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
2753 }
2754 break;
David Brazdil46e2a392015-03-16 17:31:52 +00002755 case Primitive::kPrimBoolean:
2756 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002757 case Primitive::kPrimByte:
2758 case Primitive::kPrimShort:
2759 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002760 // Processing a Dex `int-to-char' instruction.
2761 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002762 __ movzxw(out.AsRegister<Register>(), in.AsRegister<Register>());
Roland Levillain981e4542014-11-14 11:47:14 +00002763 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002764 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain981e4542014-11-14 11:47:14 +00002765 } else {
2766 DCHECK(in.GetConstant()->IsIntConstant());
2767 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002768 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
Roland Levillain981e4542014-11-14 11:47:14 +00002769 }
2770 break;
2771
2772 default:
2773 LOG(FATAL) << "Unexpected type conversion from " << input_type
2774 << " to " << result_type;
2775 }
2776 break;
2777
Roland Levillaindff1f282014-11-05 14:15:05 +00002778 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002779 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002780 case Primitive::kPrimBoolean:
2781 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002782 case Primitive::kPrimByte:
2783 case Primitive::kPrimShort:
2784 case Primitive::kPrimInt:
2785 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002786 // Processing a Dex `int-to-float' instruction.
Roland Levillain271ab9c2014-11-27 15:23:57 +00002787 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00002788 break;
2789
Roland Levillain6d0e4832014-11-27 18:31:21 +00002790 case Primitive::kPrimLong: {
2791 // Processing a Dex `long-to-float' instruction.
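        // SSE on 32-bit x86 cannot convert a 64-bit integer directly, so the
        // value is pushed onto the x87 FP stack (PushOntoFPStack below) and the
        // rounded single-precision result is stored back with fstps.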
Roland Levillain232ade02015-04-20 15:14:36 +01002792 size_t adjustment = 0;
Roland Levillain6d0e4832014-11-27 18:31:21 +00002793
Roland Levillain232ade02015-04-20 15:14:36 +01002794 // Create stack space for the call to
2795 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstps below.
2796 // TODO: enhance register allocator to ask for stack temporaries.
2797 if (!in.IsDoubleStackSlot() || !out.IsStackSlot()) {
2798 adjustment = Primitive::ComponentSize(Primitive::kPrimLong);
2799 __ subl(ESP, Immediate(adjustment));
2800 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002801
Roland Levillain232ade02015-04-20 15:14:36 +01002802 // Load the value to the FP stack, using temporaries if needed.
2803 PushOntoFPStack(in, 0, adjustment, false, true);
2804
2805 if (out.IsStackSlot()) {
2806 __ fstps(Address(ESP, out.GetStackIndex() + adjustment));
2807 } else {
2808 __ fstps(Address(ESP, 0));
2809 Location stack_temp = Location::StackSlot(0);
2810 codegen_->Move32(out, stack_temp);
2811 }
2812
2813 // Remove the temporary stack space we allocated.
2814 if (adjustment != 0) {
2815 __ addl(ESP, Immediate(adjustment));
2816 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002817 break;
2818 }
2819
Roland Levillaincff13742014-11-17 14:32:17 +00002820 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002821 // Processing a Dex `double-to-float' instruction.
2822 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00002823 break;
2824
2825 default:
2826 LOG(FATAL) << "Unexpected type conversion from " << input_type
2827 << " to " << result_type;
2828 }
2829 break;
2830
Roland Levillaindff1f282014-11-05 14:15:05 +00002831 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002832 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002833 case Primitive::kPrimBoolean:
2834 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002835 case Primitive::kPrimByte:
2836 case Primitive::kPrimShort:
2837 case Primitive::kPrimInt:
2838 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002839 // Processing a Dex `int-to-double' instruction.
Roland Levillain271ab9c2014-11-27 15:23:57 +00002840 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00002841 break;
2842
Roland Levillain647b9ed2014-11-27 12:06:00 +00002843 case Primitive::kPrimLong: {
2844 // Processing a Dex `long-to-double' instruction.
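        // As in the long-to-float case above, the conversion goes through the
        // x87 FP stack, with fstpl storing the double-precision result.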
Roland Levillain232ade02015-04-20 15:14:36 +01002845 size_t adjustment = 0;
Roland Levillain647b9ed2014-11-27 12:06:00 +00002846
Roland Levillain232ade02015-04-20 15:14:36 +01002847 // Create stack space for the call to
2848 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstpl below.
2849 // TODO: enhance register allocator to ask for stack temporaries.
2850 if (!in.IsDoubleStackSlot() || !out.IsDoubleStackSlot()) {
2851 adjustment = Primitive::ComponentSize(Primitive::kPrimLong);
2852 __ subl(ESP, Immediate(adjustment));
2853 }
2854
2855 // Load the value to the FP stack, using temporaries if needed.
2856 PushOntoFPStack(in, 0, adjustment, false, true);
2857
2858 if (out.IsDoubleStackSlot()) {
2859 __ fstpl(Address(ESP, out.GetStackIndex() + adjustment));
2860 } else {
2861 __ fstpl(Address(ESP, 0));
2862 Location stack_temp = Location::DoubleStackSlot(0);
2863 codegen_->Move64(out, stack_temp);
2864 }
2865
2866 // Remove the temporary stack space we allocated.
2867 if (adjustment != 0) {
2868 __ addl(ESP, Immediate(adjustment));
2869 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002870 break;
2871 }
2872
Roland Levillaincff13742014-11-17 14:32:17 +00002873 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002874 // Processing a Dex `float-to-double' instruction.
2875 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00002876 break;
2877
2878 default:
2879 LOG(FATAL) << "Unexpected type conversion from " << input_type
2880 << " to " << result_type;
2881 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002882 break;
2883
2884 default:
2885 LOG(FATAL) << "Unexpected type conversion from " << input_type
2886 << " to " << result_type;
2887 }
2888}
2889
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002890void LocationsBuilderX86::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002891 LocationSummary* locations =
2892 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002893 switch (add->GetResultType()) {
Mark Mendell09b84632015-02-13 17:48:38 -05002894 case Primitive::kPrimInt: {
2895 locations->SetInAt(0, Location::RequiresRegister());
2896 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2897 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2898 break;
2899 }
2900
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002901 case Primitive::kPrimLong: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002902 locations->SetInAt(0, Location::RequiresRegister());
2903 locations->SetInAt(1, Location::Any());
2904 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002905 break;
2906 }
2907
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002908 case Primitive::kPrimFloat:
2909 case Primitive::kPrimDouble: {
2910 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00002911 if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
2912 DCHECK(add->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00002913 } else if (add->InputAt(1)->IsConstant()) {
2914 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00002915 } else {
2916 locations->SetInAt(1, Location::Any());
2917 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002918 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002919 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002920 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002921
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002922 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002923 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
2924 break;
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002925 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002926}
2927
2928void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
2929 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002930 Location first = locations->InAt(0);
2931 Location second = locations->InAt(1);
Mark Mendell09b84632015-02-13 17:48:38 -05002932 Location out = locations->Out();
2933
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002934 switch (add->GetResultType()) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002935 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002936 if (second.IsRegister()) {
Mark Mendell09b84632015-02-13 17:48:38 -05002937 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2938 __ addl(out.AsRegister<Register>(), second.AsRegister<Register>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002939 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2940 __ addl(out.AsRegister<Register>(), first.AsRegister<Register>());
Mark Mendell09b84632015-02-13 17:48:38 -05002941 } else {
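          // Neither input aliases the output register, so use LEA as a
          // non-destructive three-operand add: out = first + second.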
2942 __ leal(out.AsRegister<Register>(), Address(
2943 first.AsRegister<Register>(), second.AsRegister<Register>(), TIMES_1, 0));
2944 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002945 } else if (second.IsConstant()) {
Mark Mendell09b84632015-02-13 17:48:38 -05002946 int32_t value = second.GetConstant()->AsIntConstant()->GetValue();
2947 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2948 __ addl(out.AsRegister<Register>(), Immediate(value));
2949 } else {
2950 __ leal(out.AsRegister<Register>(), Address(first.AsRegister<Register>(), value));
2951 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002952 } else {
Mark Mendell09b84632015-02-13 17:48:38 -05002953 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002954 __ addl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002955 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002956 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002957 }
2958
2959 case Primitive::kPrimLong: {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00002960 if (second.IsRegisterPair()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002961 __ addl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
2962 __ adcl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00002963 } else if (second.IsDoubleStackSlot()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002964 __ addl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
2965 __ adcl(first.AsRegisterPairHigh<Register>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002966 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00002967 } else {
2968 DCHECK(second.IsConstant()) << second;
2969 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
2970 __ addl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
2971 __ adcl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002972 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002973 break;
2974 }
2975
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002976 case Primitive::kPrimFloat: {
2977 if (second.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002978 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
Mark Mendell0616ae02015-04-17 12:49:27 -04002979 } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
2980 HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00002981 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04002982 __ addss(first.AsFpuRegister<XmmRegister>(),
2983 codegen_->LiteralFloatAddress(
2984 const_area->GetConstant()->AsFloatConstant()->GetValue(),
2985 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
2986 } else {
2987 DCHECK(second.IsStackSlot());
2988 __ addss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002989 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002990 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002991 }
2992
2993 case Primitive::kPrimDouble: {
2994 if (second.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002995 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
Mark Mendell0616ae02015-04-17 12:49:27 -04002996 } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
2997 HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00002998 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04002999 __ addsd(first.AsFpuRegister<XmmRegister>(),
3000 codegen_->LiteralDoubleAddress(
3001 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3002 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3003 } else {
3004 DCHECK(second.IsDoubleStackSlot());
3005 __ addsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003006 }
3007 break;
3008 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003009
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003010 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003011 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003012 }
3013}
3014
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003015void LocationsBuilderX86::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003016 LocationSummary* locations =
3017 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003018 switch (sub->GetResultType()) {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003019 case Primitive::kPrimInt:
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003020 case Primitive::kPrimLong: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003021 locations->SetInAt(0, Location::RequiresRegister());
3022 locations->SetInAt(1, Location::Any());
3023 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003024 break;
3025 }
Calin Juravle11351682014-10-23 15:38:15 +01003026 case Primitive::kPrimFloat:
3027 case Primitive::kPrimDouble: {
3028 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003029 if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3030 DCHECK(sub->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003031 } else if (sub->InputAt(1)->IsConstant()) {
3032 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003033 } else {
3034 locations->SetInAt(1, Location::Any());
3035 }
Calin Juravle11351682014-10-23 15:38:15 +01003036 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003037 break;
Calin Juravle11351682014-10-23 15:38:15 +01003038 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003039
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003040 default:
Calin Juravle11351682014-10-23 15:38:15 +01003041 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003042 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003043}
3044
3045void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
3046 LocationSummary* locations = sub->GetLocations();
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003047 Location first = locations->InAt(0);
3048 Location second = locations->InAt(1);
Calin Juravle11351682014-10-23 15:38:15 +01003049 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003050 switch (sub->GetResultType()) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003051 case Primitive::kPrimInt: {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003052 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003053 __ subl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003054 } else if (second.IsConstant()) {
Roland Levillain199f3362014-11-27 17:15:16 +00003055 __ subl(first.AsRegister<Register>(),
3056 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003057 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003058 __ subl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003059 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003060 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003061 }
3062
3063 case Primitive::kPrimLong: {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00003064 if (second.IsRegisterPair()) {
Calin Juravle11351682014-10-23 15:38:15 +01003065 __ subl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3066 __ sbbl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003067 } else if (second.IsDoubleStackSlot()) {
Calin Juravle11351682014-10-23 15:38:15 +01003068 __ subl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003069 __ sbbl(first.AsRegisterPairHigh<Register>(),
3070 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003071 } else {
3072 DCHECK(second.IsConstant()) << second;
3073 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3074 __ subl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
3075 __ sbbl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003076 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003077 break;
3078 }
3079
Calin Juravle11351682014-10-23 15:38:15 +01003080 case Primitive::kPrimFloat: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003081 if (second.IsFpuRegister()) {
3082 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3083 } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3084 HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003085 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003086 __ subss(first.AsFpuRegister<XmmRegister>(),
3087 codegen_->LiteralFloatAddress(
3088 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3089 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3090 } else {
3091 DCHECK(second.IsStackSlot());
3092 __ subss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3093 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003094 break;
Calin Juravle11351682014-10-23 15:38:15 +01003095 }
3096
3097 case Primitive::kPrimDouble: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003098 if (second.IsFpuRegister()) {
3099 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3100 } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3101 HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003102 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003103 __ subsd(first.AsFpuRegister<XmmRegister>(),
3104 codegen_->LiteralDoubleAddress(
3105 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3106 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3107 } else {
3108 DCHECK(second.IsDoubleStackSlot());
3109 __ subsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3110 }
Calin Juravle11351682014-10-23 15:38:15 +01003111 break;
3112 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003113
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003114 default:
Calin Juravle11351682014-10-23 15:38:15 +01003115 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003116 }
3117}
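
// Illustrative standalone sketch (not part of this code generator): a scalar model of
// the long subtraction emitted above, where subl/sbbl propagate the borrow from the
// low half into the high half. SubLongViaHalves is a made-up name for this example.
#include <cstdint>

uint64_t SubLongViaHalves(uint32_t a_lo, uint32_t a_hi, uint32_t b_lo, uint32_t b_hi) {
  uint32_t lo = a_lo - b_lo;                  // subl(first.lo, second.lo)
  uint32_t borrow = (a_lo < b_lo) ? 1u : 0u;  // the carry flag left behind by subl
  uint32_t hi = a_hi - b_hi - borrow;         // sbbl(first.hi, second.hi)
  return (static_cast<uint64_t>(hi) << 32) | lo;
}
// Equals ((uint64_t(a_hi) << 32) | a_lo) - ((uint64_t(b_hi) << 32) | b_lo) modulo 2^64.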
3118
Calin Juravle34bacdf2014-10-07 20:23:36 +01003119void LocationsBuilderX86::VisitMul(HMul* mul) {
3120 LocationSummary* locations =
3121 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3122 switch (mul->GetResultType()) {
3123 case Primitive::kPrimInt:
3124 locations->SetInAt(0, Location::RequiresRegister());
3125 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003126 if (mul->InputAt(1)->IsIntConstant()) {
3127 // Can use 3 operand multiply.
3128 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3129 } else {
3130 locations->SetOut(Location::SameAsFirstInput());
3131 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003132 break;
3133 case Primitive::kPrimLong: {
3134 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003135 locations->SetInAt(1, Location::Any());
3136 locations->SetOut(Location::SameAsFirstInput());
3137 // Needed for imul on 32bits with 64bits output.
3138      // Needed for imul on 32-bit operands with a 64-bit output.
3139 locations->AddTemp(Location::RegisterLocation(EDX));
3140 break;
3141 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003142 case Primitive::kPrimFloat:
3143 case Primitive::kPrimDouble: {
3144 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003145 if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3146 DCHECK(mul->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003147 } else if (mul->InputAt(1)->IsConstant()) {
3148 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003149 } else {
3150 locations->SetInAt(1, Location::Any());
3151 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003152 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003153 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003154 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003155
3156 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003157 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003158 }
3159}
3160
3161void InstructionCodeGeneratorX86::VisitMul(HMul* mul) {
3162 LocationSummary* locations = mul->GetLocations();
3163 Location first = locations->InAt(0);
3164 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003165 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003166
3167 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003168 case Primitive::kPrimInt:
3169 // The constant may have ended up in a register, so test explicitly to avoid
3170 // problems where the output may not be the same as the first operand.
3171 if (mul->InputAt(1)->IsIntConstant()) {
3172 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3173 __ imull(out.AsRegister<Register>(), first.AsRegister<Register>(), imm);
3174 } else if (second.IsRegister()) {
3175 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003176 __ imull(first.AsRegister<Register>(), second.AsRegister<Register>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003177 } else {
3178 DCHECK(second.IsStackSlot());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003179 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003180 __ imull(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003181 }
3182 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003183
3184 case Primitive::kPrimLong: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003185 Register in1_hi = first.AsRegisterPairHigh<Register>();
3186 Register in1_lo = first.AsRegisterPairLow<Register>();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003187 Register eax = locations->GetTemp(0).AsRegister<Register>();
3188 Register edx = locations->GetTemp(1).AsRegister<Register>();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003189
3190 DCHECK_EQ(EAX, eax);
3191 DCHECK_EQ(EDX, edx);
3192
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003193 // input: in1 - 64 bits, in2 - 64 bits.
Calin Juravle34bacdf2014-10-07 20:23:36 +01003194 // output: in1
3195 // formula: in1.hi : in1.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
3196 // parts: in1.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
3197 // parts: in1.lo = (in1.lo * in2.lo)[31:0]
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003198 if (second.IsConstant()) {
3199 DCHECK(second.GetConstant()->IsLongConstant());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003200
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003201 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3202 int32_t low_value = Low32Bits(value);
3203 int32_t high_value = High32Bits(value);
3204 Immediate low(low_value);
3205 Immediate high(high_value);
3206
3207 __ movl(eax, high);
3208 // eax <- in1.lo * in2.hi
3209 __ imull(eax, in1_lo);
3210 // in1.hi <- in1.hi * in2.lo
3211 __ imull(in1_hi, low);
3212 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3213 __ addl(in1_hi, eax);
3214        // move in2_lo to eax to prepare for the double-width multiply
3215 __ movl(eax, low);
3216 // edx:eax <- in1.lo * in2.lo
3217 __ mull(in1_lo);
3218 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3219 __ addl(in1_hi, edx);
3220 // in1.lo <- (in1.lo * in2.lo)[31:0];
3221 __ movl(in1_lo, eax);
3222 } else if (second.IsRegisterPair()) {
3223 Register in2_hi = second.AsRegisterPairHigh<Register>();
3224 Register in2_lo = second.AsRegisterPairLow<Register>();
3225
3226 __ movl(eax, in2_hi);
3227 // eax <- in1.lo * in2.hi
3228 __ imull(eax, in1_lo);
3229 // in1.hi <- in1.hi * in2.lo
3230 __ imull(in1_hi, in2_lo);
3231 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3232 __ addl(in1_hi, eax);
3233        // move in1_lo to eax to prepare for the double-width multiply
3234 __ movl(eax, in1_lo);
3235 // edx:eax <- in1.lo * in2.lo
3236 __ mull(in2_lo);
3237 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3238 __ addl(in1_hi, edx);
3239 // in1.lo <- (in1.lo * in2.lo)[31:0];
3240 __ movl(in1_lo, eax);
3241 } else {
3242 DCHECK(second.IsDoubleStackSlot()) << second;
3243 Address in2_hi(ESP, second.GetHighStackIndex(kX86WordSize));
3244 Address in2_lo(ESP, second.GetStackIndex());
3245
3246 __ movl(eax, in2_hi);
3247 // eax <- in1.lo * in2.hi
3248 __ imull(eax, in1_lo);
3249 // in1.hi <- in1.hi * in2.lo
3250 __ imull(in1_hi, in2_lo);
3251 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3252 __ addl(in1_hi, eax);
3253        // move in1_lo to eax to prepare for the double-width multiply
3254 __ movl(eax, in1_lo);
3255 // edx:eax <- in1.lo * in2.lo
3256 __ mull(in2_lo);
3257 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3258 __ addl(in1_hi, edx);
3259 // in1.lo <- (in1.lo * in2.lo)[31:0];
3260 __ movl(in1_lo, eax);
3261 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003262
3263 break;
3264 }
3265
Calin Juravleb5bfa962014-10-21 18:02:24 +01003266 case Primitive::kPrimFloat: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003267 DCHECK(first.Equals(locations->Out()));
3268 if (second.IsFpuRegister()) {
3269 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3270 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3271 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003272 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003273 __ mulss(first.AsFpuRegister<XmmRegister>(),
3274 codegen_->LiteralFloatAddress(
3275 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3276 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3277 } else {
3278 DCHECK(second.IsStackSlot());
3279 __ mulss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3280 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003281 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003282 }
3283
3284 case Primitive::kPrimDouble: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003285 DCHECK(first.Equals(locations->Out()));
3286 if (second.IsFpuRegister()) {
3287 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3288 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3289 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003290 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003291 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3292 codegen_->LiteralDoubleAddress(
3293 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3294 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3295 } else {
3296 DCHECK(second.IsDoubleStackSlot());
3297 __ mulsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3298 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003299 break;
3300 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003301
3302 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003303 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003304 }
3305}
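
// Illustrative standalone sketch (not part of this code generator): a scalar model of
// the long-multiply decomposition emitted above for Primitive::kPrimLong. Only the low
// 64 bits of the product are kept. MulLongViaHalves is a made-up name for this example.
#include <cstdint>

uint64_t MulLongViaHalves(uint64_t a, uint64_t b) {
  uint32_t a_lo = static_cast<uint32_t>(a), a_hi = static_cast<uint32_t>(a >> 32);
  uint32_t b_lo = static_cast<uint32_t>(b), b_hi = static_cast<uint32_t>(b >> 32);
  uint32_t hi = a_lo * b_hi;                            // imull(eax, in1_lo) with eax = in2.hi
  hi += a_hi * b_lo;                                    // imull(in1_hi, in2_lo); addl(in1_hi, eax)
  uint64_t cross = static_cast<uint64_t>(a_lo) * b_lo;  // mull(in2_lo) -> edx:eax
  hi += static_cast<uint32_t>(cross >> 32);             // addl(in1_hi, edx)
  uint32_t lo = static_cast<uint32_t>(cross);           // movl(in1_lo, eax)
  return (static_cast<uint64_t>(hi) << 32) | lo;
}
// The result matches a plain 64-bit multiply, signed or unsigned, since both agree on
// the low 64 bits, e.g. MulLongViaHalves(0xFFFFFFFFFFFFFFFFull, 3) == 0xFFFFFFFFFFFFFFFDull.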
3306
Roland Levillain232ade02015-04-20 15:14:36 +01003307void InstructionCodeGeneratorX86::PushOntoFPStack(Location source,
3308 uint32_t temp_offset,
3309 uint32_t stack_adjustment,
3310 bool is_fp,
3311 bool is_wide) {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003312 if (source.IsStackSlot()) {
Roland Levillain232ade02015-04-20 15:14:36 +01003313 DCHECK(!is_wide);
3314 if (is_fp) {
3315 __ flds(Address(ESP, source.GetStackIndex() + stack_adjustment));
3316 } else {
3317 __ filds(Address(ESP, source.GetStackIndex() + stack_adjustment));
3318 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003319 } else if (source.IsDoubleStackSlot()) {
Roland Levillain232ade02015-04-20 15:14:36 +01003320 DCHECK(is_wide);
3321 if (is_fp) {
3322 __ fldl(Address(ESP, source.GetStackIndex() + stack_adjustment));
3323 } else {
3324 __ fildl(Address(ESP, source.GetStackIndex() + stack_adjustment));
3325 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003326 } else {
3327 // Write the value to the temporary location on the stack and load to FP stack.
Roland Levillain232ade02015-04-20 15:14:36 +01003328 if (!is_wide) {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003329 Location stack_temp = Location::StackSlot(temp_offset);
3330 codegen_->Move32(stack_temp, source);
Roland Levillain232ade02015-04-20 15:14:36 +01003331 if (is_fp) {
3332 __ flds(Address(ESP, temp_offset));
3333 } else {
3334 __ filds(Address(ESP, temp_offset));
3335 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003336 } else {
3337 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3338 codegen_->Move64(stack_temp, source);
Roland Levillain232ade02015-04-20 15:14:36 +01003339 if (is_fp) {
3340 __ fldl(Address(ESP, temp_offset));
3341 } else {
3342 __ fildl(Address(ESP, temp_offset));
3343 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003344 }
3345 }
3346}
3347
3348void InstructionCodeGeneratorX86::GenerateRemFP(HRem *rem) {
3349 Primitive::Type type = rem->GetResultType();
3350 bool is_float = type == Primitive::kPrimFloat;
3351 size_t elem_size = Primitive::ComponentSize(type);
3352 LocationSummary* locations = rem->GetLocations();
3353 Location first = locations->InAt(0);
3354 Location second = locations->InAt(1);
3355 Location out = locations->Out();
3356
3357 // Create stack space for 2 elements.
3358 // TODO: enhance register allocator to ask for stack temporaries.
3359 __ subl(ESP, Immediate(2 * elem_size));
3360
3361 // Load the values to the FP stack in reverse order, using temporaries if needed.
Roland Levillain232ade02015-04-20 15:14:36 +01003362 const bool is_wide = !is_float;
3363 PushOntoFPStack(second, elem_size, 2 * elem_size, /* is_fp */ true, is_wide);
3364 PushOntoFPStack(first, 0, 2 * elem_size, /* is_fp */ true, is_wide);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003365
3366 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003367 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003368 __ Bind(&retry);
3369 __ fprem();
3370
3371 // Move FP status to AX.
3372 __ fstsw();
3373
3374 // And see if the argument reduction is complete. This is signaled by the
3375 // C2 FPU flag bit set to 0.
3376 __ andl(EAX, Immediate(kC2ConditionMask));
3377 __ j(kNotEqual, &retry);
3378
3379 // We have settled on the final value. Retrieve it into an XMM register.
3380 // Store FP top of stack to real stack.
3381 if (is_float) {
3382 __ fsts(Address(ESP, 0));
3383 } else {
3384 __ fstl(Address(ESP, 0));
3385 }
3386
3387 // Pop the 2 items from the FP stack.
3388 __ fucompp();
3389
3390 // Load the value from the stack into an XMM register.
3391 DCHECK(out.IsFpuRegister()) << out;
3392 if (is_float) {
3393 __ movss(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3394 } else {
3395 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3396 }
3397
3398 // And remove the temporary stack space we allocated.
3399 __ addl(ESP, Immediate(2 * elem_size));
3400}
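
// Illustrative standalone sketch: the fprem loop above converges to the truncated
// (round-toward-zero) remainder, which is what std::fmod computes; fprem1 would give
// the IEEE round-to-nearest remainder instead. That equivalence is the assumption here,
// and RemFPReference is a made-up name for this example.
#include <cmath>

double RemFPReference(double dividend, double divisor) {
  return std::fmod(dividend, divisor);  // the value the settled FP top-of-stack holds
}
// e.g. RemFPReference(5.5, 2.0) == 1.5 and RemFPReference(-5.5, 2.0) == -1.5:
// the remainder carries the sign of the dividend.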
3401
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003402
3403void InstructionCodeGeneratorX86::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3404 DCHECK(instruction->IsDiv() || instruction->IsRem());
3405
3406 LocationSummary* locations = instruction->GetLocations();
3407 DCHECK(locations->InAt(1).IsConstant());
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003408 DCHECK(locations->InAt(1).GetConstant()->IsIntConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003409
3410 Register out_register = locations->Out().AsRegister<Register>();
3411 Register input_register = locations->InAt(0).AsRegister<Register>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003412 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003413
3414 DCHECK(imm == 1 || imm == -1);
3415
3416 if (instruction->IsRem()) {
3417 __ xorl(out_register, out_register);
3418 } else {
3419 __ movl(out_register, input_register);
3420 if (imm == -1) {
3421 __ negl(out_register);
3422 }
3423 }
3424}
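
// Illustrative standalone sketch of the identities DivRemOneOrMinusOne relies on:
// n % 1 == 0 and n % -1 == 0 (hence the xorl), n / 1 == n, and n / -1 == -n, with the
// negation wrapping for INT32_MIN exactly like negl. The helper names are made up.
#include <cstdint>

int32_t DivByOneOrMinusOne(int32_t n, int32_t imm) {
  // imm is +1 or -1, mirroring the DCHECK above; 0u - n is two's-complement negation
  // without the signed-overflow UB that -n would have for INT32_MIN.
  return imm == 1 ? n : static_cast<int32_t>(0u - static_cast<uint32_t>(n));
}

int32_t RemByOneOrMinusOne(int32_t /* n */) { return 0; }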
3425
3426
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003427void InstructionCodeGeneratorX86::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003428 LocationSummary* locations = instruction->GetLocations();
3429
3430 Register out_register = locations->Out().AsRegister<Register>();
3431 Register input_register = locations->InAt(0).AsRegister<Register>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003432 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003433 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3434 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003435
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003436 Register num = locations->GetTemp(0).AsRegister<Register>();
3437
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003438 __ leal(num, Address(input_register, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003439 __ testl(input_register, input_register);
3440 __ cmovl(kGreaterEqual, num, input_register);
3441 int shift = CTZ(imm);
3442 __ sarl(num, Immediate(shift));
3443
3444 if (imm < 0) {
3445 __ negl(num);
3446 }
3447
3448 __ movl(out_register, num);
3449}
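
// Illustrative standalone sketch: round-toward-zero signed division by a power of two,
// mirroring the leal/testl/cmovl/sarl sequence above. Assumes two's-complement wrapping
// conversions and an arithmetic right shift on negative values, as on the targets ART
// supports. DivByPowerOfTwoReference is a made-up name for this example.
#include <cstdint>

int32_t DivByPowerOfTwoReference(int32_t input, int32_t imm) {
  uint32_t abs_imm = static_cast<uint32_t>(imm < 0 ? -static_cast<int64_t>(imm) : imm);
  int shift = 0;
  while (((abs_imm >> shift) & 1u) == 0) ++shift;  // CTZ of a power of two
  // Bias negative inputs so the arithmetic shift rounds toward zero, not -infinity.
  int32_t num = (input >= 0) ? input : static_cast<int32_t>(input + abs_imm - 1);
  num >>= shift;                                   // sarl(num, shift)
  return (imm < 0) ? -num : num;                   // negl when the divisor is negative
}
// e.g. DivByPowerOfTwoReference(-7, 4) == -1, whereas a bare -7 >> 2 would give -2.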
3450
3451void InstructionCodeGeneratorX86::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3452 DCHECK(instruction->IsDiv() || instruction->IsRem());
3453
3454 LocationSummary* locations = instruction->GetLocations();
3455 int imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
3456
3457 Register eax = locations->InAt(0).AsRegister<Register>();
3458 Register out = locations->Out().AsRegister<Register>();
3459 Register num;
3460 Register edx;
3461
3462 if (instruction->IsDiv()) {
3463 edx = locations->GetTemp(0).AsRegister<Register>();
3464 num = locations->GetTemp(1).AsRegister<Register>();
3465 } else {
3466 edx = locations->Out().AsRegister<Register>();
3467 num = locations->GetTemp(0).AsRegister<Register>();
3468 }
3469
3470 DCHECK_EQ(EAX, eax);
3471 DCHECK_EQ(EDX, edx);
3472 if (instruction->IsDiv()) {
3473 DCHECK_EQ(EAX, out);
3474 } else {
3475 DCHECK_EQ(EDX, out);
3476 }
3477
3478 int64_t magic;
3479 int shift;
3480 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3481
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003482 // Save the numerator.
3483 __ movl(num, eax);
3484
3485 // EAX = magic
3486 __ movl(eax, Immediate(magic));
3487
3488 // EDX:EAX = magic * numerator
3489 __ imull(num);
3490
3491 if (imm > 0 && magic < 0) {
3492 // EDX += num
3493 __ addl(edx, num);
3494 } else if (imm < 0 && magic > 0) {
3495 __ subl(edx, num);
3496 }
3497
3498 // Shift if needed.
3499 if (shift != 0) {
3500 __ sarl(edx, Immediate(shift));
3501 }
3502
3503 // EDX += 1 if EDX < 0
3504 __ movl(eax, edx);
3505 __ shrl(edx, Immediate(31));
3506 __ addl(edx, eax);
3507
3508 if (instruction->IsRem()) {
3509 __ movl(eax, num);
3510 __ imull(edx, Immediate(imm));
3511 __ subl(eax, edx);
3512 __ movl(edx, eax);
3513 } else {
3514 __ movl(eax, edx);
3515 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003516}
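
// Illustrative standalone sketch of the magic-number division above, assuming the
// (magic, shift) pair comes from CalculateMagicAndShiftForDivRem; for divisor 7 the
// classic values are magic = 0x92492493 and shift = 2. Assumes arithmetic right shifts
// on signed values. DivByConstViaMagic is a made-up name for this example.
#include <cstdint>
#include <cassert>

int32_t DivByConstViaMagic(int32_t n, int32_t magic, int shift, int32_t divisor) {
  int64_t product = static_cast<int64_t>(magic) * n;          // imull(num)
  int32_t q = static_cast<int32_t>(product >> 32);            // keep EDX, the high half
  if (divisor > 0 && magic < 0) q += n;                       // addl(edx, num)
  if (divisor < 0 && magic > 0) q -= n;                       // subl(edx, num)
  q >>= shift;                                                // sarl(edx, shift)
  q += static_cast<int32_t>(static_cast<uint32_t>(q) >> 31);  // add 1 if the quotient is negative
  return q;                                                   // the remainder is n - q * divisor
}

int main() {
  const int32_t magic = static_cast<int32_t>(0x92492493u);
  for (int32_t n : {-100, -20, -7, -1, 0, 1, 6, 7, 20, 100}) {
    assert(DivByConstViaMagic(n, magic, /* shift */ 2, /* divisor */ 7) == n / 7);
  }
  return 0;
}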
3517
Calin Juravlebacfec32014-11-14 15:54:36 +00003518void InstructionCodeGeneratorX86::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3519 DCHECK(instruction->IsDiv() || instruction->IsRem());
3520
3521 LocationSummary* locations = instruction->GetLocations();
3522 Location out = locations->Out();
3523 Location first = locations->InAt(0);
3524 Location second = locations->InAt(1);
3525 bool is_div = instruction->IsDiv();
3526
3527 switch (instruction->GetResultType()) {
3528 case Primitive::kPrimInt: {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003529 DCHECK_EQ(EAX, first.AsRegister<Register>());
3530 DCHECK_EQ(is_div ? EAX : EDX, out.AsRegister<Register>());
Calin Juravlebacfec32014-11-14 15:54:36 +00003531
Vladimir Marko13c86fd2015-11-11 12:37:46 +00003532 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003533 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003534
3535 if (imm == 0) {
3536          // Do not generate anything for 0. The preceding DivZeroCheck makes this code unreachable.
3537 } else if (imm == 1 || imm == -1) {
3538 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003539 } else if (is_div && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003540 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003541 } else {
3542 DCHECK(imm <= -2 || imm >= 2);
3543 GenerateDivRemWithAnyConstant(instruction);
3544 }
3545 } else {
David Srbecky9cd6d372016-02-09 15:24:47 +00003546 SlowPathCode* slow_path = new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86(
3547 instruction, out.AsRegister<Register>(), is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003548 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003549
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003550 Register second_reg = second.AsRegister<Register>();
3551 // 0x80000000/-1 triggers an arithmetic exception!
3552        // Dividing by -1 is actually negation and -0x80000000 = 0x80000000 so
3553 // it's safe to just use negl instead of more complex comparisons.
Calin Juravlebacfec32014-11-14 15:54:36 +00003554
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003555 __ cmpl(second_reg, Immediate(-1));
3556 __ j(kEqual, slow_path->GetEntryLabel());
Calin Juravlebacfec32014-11-14 15:54:36 +00003557
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003558        // edx:eax <- sign extension of eax
3559 __ cdq();
3560 // eax = quotient, edx = remainder
3561 __ idivl(second_reg);
3562 __ Bind(slow_path->GetExitLabel());
3563 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003564 break;
3565 }
3566
3567 case Primitive::kPrimLong: {
3568 InvokeRuntimeCallingConvention calling_convention;
3569 DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
3570 DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
3571 DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
3572 DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
3573 DCHECK_EQ(EAX, out.AsRegisterPairLow<Register>());
3574 DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());
3575
3576 if (is_div) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003577 codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003578 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00003579 } else {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003580 codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003581 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00003582 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003583 break;
3584 }
3585
3586 default:
3587 LOG(FATAL) << "Unexpected type for GenerateDivRemIntegral " << instruction->GetResultType();
3588 }
3589}
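
// Illustrative standalone sketch: why a -1 divisor takes the slow path above. The
// quotient INT32_MIN / -1 = 2^31 does not fit in 32 bits, so idivl would raise #DE;
// Java defines the result to wrap back to INT32_MIN, which is exactly what negl yields.
// NegWrapping is a made-up name for this example.
#include <cstdint>

int32_t NegWrapping(int32_t n) {
  // Two's-complement negation without the signed-overflow UB of -n for INT32_MIN.
  return static_cast<int32_t>(0u - static_cast<uint32_t>(n));
}
// NegWrapping(INT32_MIN) == INT32_MIN, matching Integer.MIN_VALUE / -1 in Java.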
3590
Calin Juravle7c4954d2014-10-28 16:57:40 +00003591void LocationsBuilderX86::VisitDiv(HDiv* div) {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003592 LocationSummary::CallKind call_kind = (div->GetResultType() == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003593 ? LocationSummary::kCallOnMainOnly
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003594 : LocationSummary::kNoCall;
3595 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
3596
Calin Juravle7c4954d2014-10-28 16:57:40 +00003597 switch (div->GetResultType()) {
Calin Juravled0d48522014-11-04 16:40:20 +00003598 case Primitive::kPrimInt: {
3599 locations->SetInAt(0, Location::RegisterLocation(EAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003600 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003601 locations->SetOut(Location::SameAsFirstInput());
3602 // Intel uses edx:eax as the dividend.
3603 locations->AddTemp(Location::RegisterLocation(EDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003604 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3605 // which enforces results to be in EAX and EDX, things are simpler if we use EAX also as
3606 // output and request another temp.
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003607 if (div->InputAt(1)->IsIntConstant()) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003608 locations->AddTemp(Location::RequiresRegister());
3609 }
Calin Juravled0d48522014-11-04 16:40:20 +00003610 break;
3611 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003612 case Primitive::kPrimLong: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003613 InvokeRuntimeCallingConvention calling_convention;
3614 locations->SetInAt(0, Location::RegisterPairLocation(
3615 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3616 locations->SetInAt(1, Location::RegisterPairLocation(
3617 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3618 // Runtime helper puts the result in EAX, EDX.
3619 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
Calin Juravle7c4954d2014-10-28 16:57:40 +00003620 break;
3621 }
3622 case Primitive::kPrimFloat:
3623 case Primitive::kPrimDouble: {
3624 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003625 if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3626 DCHECK(div->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003627 } else if (div->InputAt(1)->IsConstant()) {
3628 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003629 } else {
3630 locations->SetInAt(1, Location::Any());
3631 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003632 locations->SetOut(Location::SameAsFirstInput());
3633 break;
3634 }
3635
3636 default:
3637 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3638 }
3639}
3640
3641void InstructionCodeGeneratorX86::VisitDiv(HDiv* div) {
3642 LocationSummary* locations = div->GetLocations();
3643 Location first = locations->InAt(0);
3644 Location second = locations->InAt(1);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003645
3646 switch (div->GetResultType()) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003647 case Primitive::kPrimInt:
Calin Juravle7c4954d2014-10-28 16:57:40 +00003648 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003649 GenerateDivRemIntegral(div);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003650 break;
3651 }
3652
3653 case Primitive::kPrimFloat: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003654 if (second.IsFpuRegister()) {
3655 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3656 } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3657 HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003658 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003659 __ divss(first.AsFpuRegister<XmmRegister>(),
3660 codegen_->LiteralFloatAddress(
3661 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3662 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3663 } else {
3664 DCHECK(second.IsStackSlot());
3665 __ divss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3666 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003667 break;
3668 }
3669
3670 case Primitive::kPrimDouble: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003671 if (second.IsFpuRegister()) {
3672 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3673 } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3674 HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003675 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003676 __ divsd(first.AsFpuRegister<XmmRegister>(),
3677 codegen_->LiteralDoubleAddress(
3678 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3679 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3680 } else {
3681 DCHECK(second.IsDoubleStackSlot());
3682 __ divsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3683 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003684 break;
3685 }
3686
3687 default:
3688 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3689 }
3690}
3691
Calin Juravlebacfec32014-11-14 15:54:36 +00003692void LocationsBuilderX86::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003693 Primitive::Type type = rem->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003694
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003695 LocationSummary::CallKind call_kind = (rem->GetResultType() == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003696 ? LocationSummary::kCallOnMainOnly
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003697 : LocationSummary::kNoCall;
3698 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
Calin Juravlebacfec32014-11-14 15:54:36 +00003699
Calin Juravled2ec87d2014-12-08 14:24:46 +00003700 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003701 case Primitive::kPrimInt: {
3702 locations->SetInAt(0, Location::RegisterLocation(EAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003703 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003704 locations->SetOut(Location::RegisterLocation(EDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003705 // We need to save the numerator while we tweak eax and edx. As we are using imul in a way
3706 // which enforces results to be in EAX and EDX, things are simpler if we use EDX also as
3707 // output and request another temp.
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003708 if (rem->InputAt(1)->IsIntConstant()) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003709 locations->AddTemp(Location::RequiresRegister());
3710 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003711 break;
3712 }
3713 case Primitive::kPrimLong: {
3714 InvokeRuntimeCallingConvention calling_convention;
3715 locations->SetInAt(0, Location::RegisterPairLocation(
3716 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3717 locations->SetInAt(1, Location::RegisterPairLocation(
3718 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3719 // Runtime helper puts the result in EAX, EDX.
3720 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
3721 break;
3722 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003723 case Primitive::kPrimDouble:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003724 case Primitive::kPrimFloat: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003725 locations->SetInAt(0, Location::Any());
3726 locations->SetInAt(1, Location::Any());
3727 locations->SetOut(Location::RequiresFpuRegister());
3728 locations->AddTemp(Location::RegisterLocation(EAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003729 break;
3730 }
3731
3732 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003733 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003734 }
3735}
3736
3737void InstructionCodeGeneratorX86::VisitRem(HRem* rem) {
3738 Primitive::Type type = rem->GetResultType();
3739 switch (type) {
3740 case Primitive::kPrimInt:
3741 case Primitive::kPrimLong: {
3742 GenerateDivRemIntegral(rem);
3743 break;
3744 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003745 case Primitive::kPrimFloat:
Calin Juravlebacfec32014-11-14 15:54:36 +00003746 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003747 GenerateRemFP(rem);
Calin Juravlebacfec32014-11-14 15:54:36 +00003748 break;
3749 }
3750 default:
3751 LOG(FATAL) << "Unexpected rem type " << type;
3752 }
3753}
3754
Calin Juravled0d48522014-11-04 16:40:20 +00003755void LocationsBuilderX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003756 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003757 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003758 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003759 case Primitive::kPrimByte:
3760 case Primitive::kPrimChar:
3761 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003762 case Primitive::kPrimInt: {
3763 locations->SetInAt(0, Location::Any());
3764 break;
3765 }
3766 case Primitive::kPrimLong: {
3767 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
3768 if (!instruction->IsConstant()) {
3769 locations->AddTemp(Location::RequiresRegister());
3770 }
3771 break;
3772 }
3773 default:
3774 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
3775 }
Calin Juravled0d48522014-11-04 16:40:20 +00003776}
3777
3778void InstructionCodeGeneratorX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003779 SlowPathCode* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00003780 codegen_->AddSlowPath(slow_path);
3781
3782 LocationSummary* locations = instruction->GetLocations();
3783 Location value = locations->InAt(0);
3784
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003785 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003786 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003787 case Primitive::kPrimByte:
3788 case Primitive::kPrimChar:
3789 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003790 case Primitive::kPrimInt: {
3791 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003792 __ testl(value.AsRegister<Register>(), value.AsRegister<Register>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003793 __ j(kEqual, slow_path->GetEntryLabel());
3794 } else if (value.IsStackSlot()) {
3795 __ cmpl(Address(ESP, value.GetStackIndex()), Immediate(0));
3796 __ j(kEqual, slow_path->GetEntryLabel());
3797 } else {
3798 DCHECK(value.IsConstant()) << value;
3799 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01003800 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003801 }
3802 }
3803 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003804 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003805 case Primitive::kPrimLong: {
3806 if (value.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003807 Register temp = locations->GetTemp(0).AsRegister<Register>();
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003808 __ movl(temp, value.AsRegisterPairLow<Register>());
3809 __ orl(temp, value.AsRegisterPairHigh<Register>());
3810 __ j(kEqual, slow_path->GetEntryLabel());
3811 } else {
3812 DCHECK(value.IsConstant()) << value;
3813 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3814 __ jmp(slow_path->GetEntryLabel());
3815 }
3816 }
3817 break;
3818 }
3819 default:
3820      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003821 }
Calin Juravled0d48522014-11-04 16:40:20 +00003822}
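
// Illustrative standalone sketch: the long case above tests a 64-bit value for zero
// with a single 32-bit orl, since lo | hi == 0 exactly when both halves are zero.
// IsZeroLong is a made-up name for this example.
#include <cstdint>

bool IsZeroLong(uint32_t lo, uint32_t hi) {
  return (lo | hi) == 0u;  // movl(temp, lo); orl(temp, hi); j(kEqual, slow_path)
}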
3823
Calin Juravle9aec02f2014-11-18 23:06:35 +00003824void LocationsBuilderX86::HandleShift(HBinaryOperation* op) {
3825 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3826
3827 LocationSummary* locations =
3828 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3829
3830 switch (op->GetResultType()) {
Mark P Mendell73945692015-04-29 14:56:17 +00003831 case Primitive::kPrimInt:
Calin Juravle9aec02f2014-11-18 23:06:35 +00003832 case Primitive::kPrimLong: {
Mark P Mendell73945692015-04-29 14:56:17 +00003833      // Can't have a Location::Any() input together with a SameAsFirstInput() output.
Calin Juravle9aec02f2014-11-18 23:06:35 +00003834 locations->SetInAt(0, Location::RequiresRegister());
Mark P Mendell73945692015-04-29 14:56:17 +00003835 // The shift count needs to be in CL or a constant.
3836 locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, op->InputAt(1)));
Calin Juravle9aec02f2014-11-18 23:06:35 +00003837 locations->SetOut(Location::SameAsFirstInput());
3838 break;
3839 }
3840 default:
3841 LOG(FATAL) << "Unexpected op type " << op->GetResultType();
3842 }
3843}
3844
3845void InstructionCodeGeneratorX86::HandleShift(HBinaryOperation* op) {
3846 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3847
3848 LocationSummary* locations = op->GetLocations();
3849 Location first = locations->InAt(0);
3850 Location second = locations->InAt(1);
3851 DCHECK(first.Equals(locations->Out()));
3852
3853 switch (op->GetResultType()) {
3854 case Primitive::kPrimInt: {
Mark P Mendell73945692015-04-29 14:56:17 +00003855 DCHECK(first.IsRegister());
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003856 Register first_reg = first.AsRegister<Register>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003857 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003858 Register second_reg = second.AsRegister<Register>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003859 DCHECK_EQ(ECX, second_reg);
3860 if (op->IsShl()) {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003861 __ shll(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003862 } else if (op->IsShr()) {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003863 __ sarl(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003864 } else {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003865 __ shrl(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003866 }
3867 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003868 int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance;
Mark P Mendell73945692015-04-29 14:56:17 +00003869 if (shift == 0) {
3870 return;
3871 }
3872 Immediate imm(shift);
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003873 if (op->IsShl()) {
3874 __ shll(first_reg, imm);
3875 } else if (op->IsShr()) {
3876 __ sarl(first_reg, imm);
3877 } else {
3878 __ shrl(first_reg, imm);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003879 }
3880 }
3881 break;
3882 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003883 case Primitive::kPrimLong: {
Mark P Mendell73945692015-04-29 14:56:17 +00003884 if (second.IsRegister()) {
3885 Register second_reg = second.AsRegister<Register>();
3886 DCHECK_EQ(ECX, second_reg);
3887 if (op->IsShl()) {
3888 GenerateShlLong(first, second_reg);
3889 } else if (op->IsShr()) {
3890 GenerateShrLong(first, second_reg);
3891 } else {
3892 GenerateUShrLong(first, second_reg);
3893 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003894 } else {
Mark P Mendell73945692015-04-29 14:56:17 +00003895 // Shift by a constant.
Roland Levillain5b5b9312016-03-22 14:57:31 +00003896 int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
Mark P Mendell73945692015-04-29 14:56:17 +00003897 // Nothing to do if the shift is 0, as the input is already the output.
3898 if (shift != 0) {
3899 if (op->IsShl()) {
3900 GenerateShlLong(first, shift);
3901 } else if (op->IsShr()) {
3902 GenerateShrLong(first, shift);
3903 } else {
3904 GenerateUShrLong(first, shift);
3905 }
3906 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003907 }
3908 break;
3909 }
Calin Juravle9aec02f2014-11-18 23:06:35 +00003910 default:
3911 LOG(FATAL) << "Unexpected op type " << op->GetResultType();
3912 }
3913}
3914
Mark P Mendell73945692015-04-29 14:56:17 +00003915void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, int shift) {
3916 Register low = loc.AsRegisterPairLow<Register>();
3917 Register high = loc.AsRegisterPairHigh<Register>();
Mark Mendellba56d062015-05-05 21:34:03 -04003918 if (shift == 1) {
3919 // This is just an addition.
3920 __ addl(low, low);
3921 __ adcl(high, high);
3922 } else if (shift == 32) {
Mark P Mendell73945692015-04-29 14:56:17 +00003923 // Shift by 32 is easy. High gets low, and low gets 0.
3924 codegen_->EmitParallelMoves(
3925 loc.ToLow(),
3926 loc.ToHigh(),
3927 Primitive::kPrimInt,
3928 Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
3929 loc.ToLow(),
3930 Primitive::kPrimInt);
3931 } else if (shift > 32) {
3932 // Low part becomes 0. High part is low part << (shift-32).
3933 __ movl(high, low);
3934 __ shll(high, Immediate(shift - 32));
3935 __ xorl(low, low);
3936 } else {
3937 // Between 1 and 31.
3938 __ shld(high, low, Immediate(shift));
3939 __ shll(low, Immediate(shift));
3940 }
3941}
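
// Illustrative standalone sketch: a 64-bit shift-left composed from 32-bit halves,
// mirroring the three constant-shift cases above (shift == 32, shift > 32, and 1..31
// via shld). Shift 0 is filtered out by the caller, so 32 - shift never reaches 32.
// ShlLongViaHalves is a made-up name for this example.
#include <cstdint>

uint64_t ShlLongViaHalves(uint32_t lo, uint32_t hi, int shift) {
  if (shift == 32) {
    hi = lo;                                    // high gets low...
    lo = 0u;                                    // ...and low gets 0
  } else if (shift > 32) {
    hi = lo << (shift - 32);                    // movl(high, low); shll(high, shift - 32)
    lo = 0u;                                    // xorl(low, low)
  } else {
    hi = (hi << shift) | (lo >> (32 - shift));  // shld(high, low, shift)
    lo = lo << shift;                           // shll(low, shift)
  }
  return (static_cast<uint64_t>(hi) << 32) | lo;
}
// For 1 <= shift <= 63 this equals (((uint64_t)hi << 32) | lo) << shift, modulo 2^64.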
3942
Calin Juravle9aec02f2014-11-18 23:06:35 +00003943void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04003944 NearLabel done;
Calin Juravle9aec02f2014-11-18 23:06:35 +00003945 __ shld(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>(), shifter);
3946 __ shll(loc.AsRegisterPairLow<Register>(), shifter);
3947 __ testl(shifter, Immediate(32));
3948 __ j(kEqual, &done);
3949 __ movl(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>());
3950 __ movl(loc.AsRegisterPairLow<Register>(), Immediate(0));
3951 __ Bind(&done);
3952}
3953
Mark P Mendell73945692015-04-29 14:56:17 +00003954void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, int shift) {
3955 Register low = loc.AsRegisterPairLow<Register>();
3956 Register high = loc.AsRegisterPairHigh<Register>();
3957 if (shift == 32) {
3958 // Need to copy the sign.
3959 DCHECK_NE(low, high);
3960 __ movl(low, high);
3961 __ sarl(high, Immediate(31));
3962 } else if (shift > 32) {
3963 DCHECK_NE(low, high);
3964 // High part becomes sign. Low part is shifted by shift - 32.
3965 __ movl(low, high);
3966 __ sarl(high, Immediate(31));
3967 __ sarl(low, Immediate(shift - 32));
3968 } else {
3969 // Between 1 and 31.
3970 __ shrd(low, high, Immediate(shift));
3971 __ sarl(high, Immediate(shift));
3972 }
3973}
3974
Calin Juravle9aec02f2014-11-18 23:06:35 +00003975void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04003976 NearLabel done;
Calin Juravle9aec02f2014-11-18 23:06:35 +00003977 __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
3978 __ sarl(loc.AsRegisterPairHigh<Register>(), shifter);
3979 __ testl(shifter, Immediate(32));
3980 __ j(kEqual, &done);
3981 __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
3982 __ sarl(loc.AsRegisterPairHigh<Register>(), Immediate(31));
3983 __ Bind(&done);
3984}
3985
Mark P Mendell73945692015-04-29 14:56:17 +00003986void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, int shift) {
3987 Register low = loc.AsRegisterPairLow<Register>();
3988 Register high = loc.AsRegisterPairHigh<Register>();
3989 if (shift == 32) {
3990 // Shift by 32 is easy. Low gets high, and high gets 0.
3991 codegen_->EmitParallelMoves(
3992 loc.ToHigh(),
3993 loc.ToLow(),
3994 Primitive::kPrimInt,
3995 Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
3996 loc.ToHigh(),
3997 Primitive::kPrimInt);
3998 } else if (shift > 32) {
3999 // Low part is high >> (shift - 32). High part becomes 0.
4000 __ movl(low, high);
4001 __ shrl(low, Immediate(shift - 32));
4002 __ xorl(high, high);
4003 } else {
4004 // Between 1 and 31.
4005 __ shrd(low, high, Immediate(shift));
4006 __ shrl(high, Immediate(shift));
4007 }
4008}
4009
Calin Juravle9aec02f2014-11-18 23:06:35 +00004010void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004011 NearLabel done;
Calin Juravle9aec02f2014-11-18 23:06:35 +00004012 __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
4013 __ shrl(loc.AsRegisterPairHigh<Register>(), shifter);
4014 __ testl(shifter, Immediate(32));
4015 __ j(kEqual, &done);
4016 __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
4017 __ movl(loc.AsRegisterPairHigh<Register>(), Immediate(0));
4018 __ Bind(&done);
4019}
4020
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004021void LocationsBuilderX86::VisitRor(HRor* ror) {
4022 LocationSummary* locations =
4023 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
4024
4025 switch (ror->GetResultType()) {
4026 case Primitive::kPrimLong:
4027 // Add the temporary needed.
4028 locations->AddTemp(Location::RequiresRegister());
4029 FALLTHROUGH_INTENDED;
4030 case Primitive::kPrimInt:
4031 locations->SetInAt(0, Location::RequiresRegister());
4032 // The shift count needs to be in CL (unless it is a constant).
4033 locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, ror->InputAt(1)));
4034 locations->SetOut(Location::SameAsFirstInput());
4035 break;
4036 default:
4037 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4038 UNREACHABLE();
4039 }
4040}
4041
4042void InstructionCodeGeneratorX86::VisitRor(HRor* ror) {
4043 LocationSummary* locations = ror->GetLocations();
4044 Location first = locations->InAt(0);
4045 Location second = locations->InAt(1);
4046
4047 if (ror->GetResultType() == Primitive::kPrimInt) {
4048 Register first_reg = first.AsRegister<Register>();
4049 if (second.IsRegister()) {
4050 Register second_reg = second.AsRegister<Register>();
4051 __ rorl(first_reg, second_reg);
4052 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004053 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004054 __ rorl(first_reg, imm);
4055 }
4056 return;
4057 }
4058
4059 DCHECK_EQ(ror->GetResultType(), Primitive::kPrimLong);
4060 Register first_reg_lo = first.AsRegisterPairLow<Register>();
4061 Register first_reg_hi = first.AsRegisterPairHigh<Register>();
4062 Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
4063 if (second.IsRegister()) {
4064 Register second_reg = second.AsRegister<Register>();
4065 DCHECK_EQ(second_reg, ECX);
4066 __ movl(temp_reg, first_reg_hi);
4067 __ shrd(first_reg_hi, first_reg_lo, second_reg);
4068 __ shrd(first_reg_lo, temp_reg, second_reg);
4069 __ movl(temp_reg, first_reg_hi);
4070 __ testl(second_reg, Immediate(32));
4071 __ cmovl(kNotEqual, first_reg_hi, first_reg_lo);
4072 __ cmovl(kNotEqual, first_reg_lo, temp_reg);
4073 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004074 int32_t shift_amt = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004075 if (shift_amt == 0) {
4076 // Already fine.
4077 return;
4078 }
4079 if (shift_amt == 32) {
4080 // Just swap.
4081 __ movl(temp_reg, first_reg_lo);
4082 __ movl(first_reg_lo, first_reg_hi);
4083 __ movl(first_reg_hi, temp_reg);
4084 return;
4085 }
4086
4087 Immediate imm(shift_amt);
4088      // Save the contents of the low value.
4089 __ movl(temp_reg, first_reg_lo);
4090
4091 // Shift right into low, feeding bits from high.
4092 __ shrd(first_reg_lo, first_reg_hi, imm);
4093
4094 // Shift right into high, feeding bits from the original low.
4095 __ shrd(first_reg_hi, temp_reg, imm);
4096
4097 // Swap if needed.
4098 if (shift_amt > 32) {
4099 __ movl(temp_reg, first_reg_lo);
4100 __ movl(first_reg_lo, first_reg_hi);
4101 __ movl(first_reg_hi, temp_reg);
4102 }
4103 }
4104}
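
// Illustrative standalone sketch: rotate-right of a 64-bit value held in two 32-bit
// halves, mirroring the constant-amount path above (two shrd steps through a temp,
// then a swap of the halves when the amount exceeds 32). Amounts 0 and 32 are the
// early-out cases. RorLongViaHalves is a made-up name for this example.
#include <cstdint>

uint64_t RorLongViaHalves(uint32_t lo, uint32_t hi, int amount) {
  int shift = amount & 63;                                         // kMaxLongShiftDistance
  if (shift == 0) return (static_cast<uint64_t>(hi) << 32) | lo;   // already fine
  if (shift == 32) return (static_cast<uint64_t>(lo) << 32) | hi;  // just swap
  int k = shift & 31;                                // shrd masks the count modulo 32
  uint32_t temp = lo;                                // movl(temp_reg, first_reg_lo)
  lo = (lo >> k) | (hi << (32 - k));                 // shrd(first_reg_lo, first_reg_hi, imm)
  hi = (hi >> k) | (temp << (32 - k));               // shrd(first_reg_hi, temp_reg, imm)
  if (shift > 32) { temp = lo; lo = hi; hi = temp; } // swap if needed
  return (static_cast<uint64_t>(hi) << 32) | lo;
}
// Equivalent to (v >> shift) | (v << (64 - shift)) for v = ((uint64_t)hi << 32) | lo
// and 1 <= shift <= 63.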
4105
Calin Juravle9aec02f2014-11-18 23:06:35 +00004106void LocationsBuilderX86::VisitShl(HShl* shl) {
4107 HandleShift(shl);
4108}
4109
4110void InstructionCodeGeneratorX86::VisitShl(HShl* shl) {
4111 HandleShift(shl);
4112}
4113
4114void LocationsBuilderX86::VisitShr(HShr* shr) {
4115 HandleShift(shr);
4116}
4117
4118void InstructionCodeGeneratorX86::VisitShr(HShr* shr) {
4119 HandleShift(shr);
4120}
4121
4122void LocationsBuilderX86::VisitUShr(HUShr* ushr) {
4123 HandleShift(ushr);
4124}
4125
4126void InstructionCodeGeneratorX86::VisitUShr(HUShr* ushr) {
4127 HandleShift(ushr);
4128}
4129
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004130void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004131 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004132 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004133 locations->SetOut(Location::RegisterLocation(EAX));
David Brazdil6de19382016-01-08 17:37:10 +00004134 if (instruction->IsStringAlloc()) {
4135 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
4136 } else {
4137 InvokeRuntimeCallingConvention calling_convention;
4138 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4139 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
4140 }
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004141}
4142
4143void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004144  // Note: if heap poisoning is enabled, the entry point takes care
4145 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004146 if (instruction->IsStringAlloc()) {
4147 // String is allocated through StringFactory. Call NewEmptyString entry point.
4148 Register temp = instruction->GetLocations()->GetTemp(0).AsRegister<Register>();
Andreas Gampe542451c2016-07-26 09:02:02 -07004149 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00004150 __ fs()->movl(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString)));
4151 __ call(Address(temp, code_offset.Int32Value()));
4152 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4153 } else {
Serban Constantinescuba45db02016-07-12 22:53:02 +01004154 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
David Brazdil6de19382016-01-08 17:37:10 +00004155 CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
4156 DCHECK(!codegen_->IsLeafMethod());
4157 }
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004158}
4159
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004160void LocationsBuilderX86::VisitNewArray(HNewArray* instruction) {
4161 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004162 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004163 locations->SetOut(Location::RegisterLocation(EAX));
4164 InvokeRuntimeCallingConvention calling_convention;
4165 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004166 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004167 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004168}
4169
4170void InstructionCodeGeneratorX86::VisitNewArray(HNewArray* instruction) {
4171 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004172 __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));
Roland Levillain4d027112015-07-01 15:41:14 +01004173  // Note: if heap poisoning is enabled, the entry point takes care
4174 // of poisoning the reference.
Serban Constantinescuba45db02016-07-12 22:53:02 +01004175 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004176 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004177 DCHECK(!codegen_->IsLeafMethod());
4178}
4179
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004180void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004181 LocationSummary* locations =
4182 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya747a392014-04-17 14:56:23 +01004183 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4184 if (location.IsStackSlot()) {
4185 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4186 } else if (location.IsDoubleStackSlot()) {
4187 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004188 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01004189 locations->SetOut(location);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004190}
4191
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004192void InstructionCodeGeneratorX86::VisitParameterValue(
4193 HParameterValue* instruction ATTRIBUTE_UNUSED) {
4194}
4195
4196void LocationsBuilderX86::VisitCurrentMethod(HCurrentMethod* instruction) {
4197 LocationSummary* locations =
4198 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4199 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4200}
4201
4202void InstructionCodeGeneratorX86::VisitCurrentMethod(HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004203}
4204
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004205void LocationsBuilderX86::VisitClassTableGet(HClassTableGet* instruction) {
4206 LocationSummary* locations =
4207 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4208 locations->SetInAt(0, Location::RequiresRegister());
4209 locations->SetOut(Location::RequiresRegister());
4210}
4211
4212void InstructionCodeGeneratorX86::VisitClassTableGet(HClassTableGet* instruction) {
4213 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00004214 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004215 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004216 instruction->GetIndex(), kX86PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004217 __ movl(locations->Out().AsRegister<Register>(),
4218 Address(locations->InAt(0).AsRegister<Register>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004219 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004220 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004221 instruction->GetIndex(), kX86PointerSize));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004222 __ movl(locations->Out().AsRegister<Register>(),
4223 Address(locations->InAt(0).AsRegister<Register>(),
4224 mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
4225      // out = out->GetImtEntryAt(method_offset);
4226 __ movl(locations->Out().AsRegister<Register>(),
4227 Address(locations->Out().AsRegister<Register>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004228 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004229}
4230
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004231void LocationsBuilderX86::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004232 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004233 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01004234 locations->SetInAt(0, Location::RequiresRegister());
4235 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffrayb55f8352014-04-07 15:26:35 +01004236}
4237
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004238void InstructionCodeGeneratorX86::VisitNot(HNot* not_) {
4239 LocationSummary* locations = not_->GetLocations();
Roland Levillain70566432014-10-24 16:20:17 +01004240 Location in = locations->InAt(0);
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01004241 Location out = locations->Out();
Roland Levillain70566432014-10-24 16:20:17 +01004242 DCHECK(in.Equals(out));
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004243 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004244 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004245 __ notl(out.AsRegister<Register>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004246 break;
4247
4248 case Primitive::kPrimLong:
Roland Levillain70566432014-10-24 16:20:17 +01004249 __ notl(out.AsRegisterPairLow<Register>());
4250 __ notl(out.AsRegisterPairHigh<Register>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004251 break;
4252
4253 default:
4254 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4255 }
Nicolas Geoffrayb55f8352014-04-07 15:26:35 +01004256}
4257
David Brazdil66d126e2015-04-03 16:02:44 +01004258void LocationsBuilderX86::VisitBooleanNot(HBooleanNot* bool_not) {
4259 LocationSummary* locations =
4260 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4261 locations->SetInAt(0, Location::RequiresRegister());
4262 locations->SetOut(Location::SameAsFirstInput());
4263}
4264
4265void InstructionCodeGeneratorX86::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004266 LocationSummary* locations = bool_not->GetLocations();
4267 Location in = locations->InAt(0);
4268 Location out = locations->Out();
4269 DCHECK(in.Equals(out));
4270 __ xorl(out.AsRegister<Register>(), Immediate(1));
4271}
4272
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004273void LocationsBuilderX86::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004274 LocationSummary* locations =
4275 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00004276 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00004277 case Primitive::kPrimBoolean:
4278 case Primitive::kPrimByte:
4279 case Primitive::kPrimShort:
4280 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08004281 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00004282 case Primitive::kPrimLong: {
4283 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravleddb7df22014-11-25 20:56:51 +00004284 locations->SetInAt(1, Location::Any());
4285 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4286 break;
4287 }
4288 case Primitive::kPrimFloat:
4289 case Primitive::kPrimDouble: {
4290 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004291 if (compare->InputAt(1)->IsX86LoadFromConstantTable()) {
4292 DCHECK(compare->InputAt(1)->IsEmittedAtUseSite());
4293 } else if (compare->InputAt(1)->IsConstant()) {
4294 locations->SetInAt(1, Location::RequiresFpuRegister());
4295 } else {
4296 locations->SetInAt(1, Location::Any());
4297 }
Calin Juravleddb7df22014-11-25 20:56:51 +00004298 locations->SetOut(Location::RequiresRegister());
4299 break;
4300 }
4301 default:
4302 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
4303 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004304}
4305
4306void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004307 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004308 Register out = locations->Out().AsRegister<Register>();
Calin Juravleddb7df22014-11-25 20:56:51 +00004309 Location left = locations->InAt(0);
4310 Location right = locations->InAt(1);
4311
Mark Mendell0c9497d2015-08-21 09:30:05 -04004312 NearLabel less, greater, done;
Aart Bika19616e2016-02-01 18:57:58 -08004313 Condition less_cond = kLess;
4314
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004315 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00004316 case Primitive::kPrimBoolean:
4317 case Primitive::kPrimByte:
4318 case Primitive::kPrimShort:
4319 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08004320 case Primitive::kPrimInt: {
Roland Levillain0b671c02016-08-19 12:02:34 +01004321 codegen_->GenerateIntCompare(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08004322 break;
4323 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004324 case Primitive::kPrimLong: {
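      // 64-bit compare on 32-bit x86: compare the high words first using signed
      // conditions; only when they are equal fall through and compare the low
      // words, where "less than" is the unsigned kBelow condition (CF).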
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004325 Register left_low = left.AsRegisterPairLow<Register>();
4326 Register left_high = left.AsRegisterPairHigh<Register>();
4327 int32_t val_low = 0;
4328 int32_t val_high = 0;
4329 bool right_is_const = false;
4330
4331 if (right.IsConstant()) {
4332 DCHECK(right.GetConstant()->IsLongConstant());
4333 right_is_const = true;
4334 int64_t val = right.GetConstant()->AsLongConstant()->GetValue();
4335 val_low = Low32Bits(val);
4336 val_high = High32Bits(val);
4337 }
4338
Calin Juravleddb7df22014-11-25 20:56:51 +00004339 if (right.IsRegisterPair()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004340 __ cmpl(left_high, right.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004341 } else if (right.IsDoubleStackSlot()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004342 __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004343 } else {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004344 DCHECK(right_is_const) << right;
Aart Bika19616e2016-02-01 18:57:58 -08004345 codegen_->Compare32BitValue(left_high, val_high);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004346 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004347 __ j(kLess, &less); // Signed compare.
4348 __ j(kGreater, &greater); // Signed compare.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004349 if (right.IsRegisterPair()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004350 __ cmpl(left_low, right.AsRegisterPairLow<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004351 } else if (right.IsDoubleStackSlot()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004352 __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004353 } else {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004354 DCHECK(right_is_const) << right;
Aart Bika19616e2016-02-01 18:57:58 -08004355 codegen_->Compare32BitValue(left_low, val_low);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004356 }
Aart Bika19616e2016-02-01 18:57:58 -08004357 less_cond = kBelow; // for CF (unsigned).
Calin Juravleddb7df22014-11-25 20:56:51 +00004358 break;
4359 }
4360 case Primitive::kPrimFloat: {
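      // The FP compare sets the flags as for an unsigned comparison, so kBelow
      // is used for the "less than" outcome. An unordered result (a NaN operand)
      // is routed to the greater or less label according to the compare's
      // gt/lt bias. The kPrimDouble case below is handled the same way.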
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004361 GenerateFPCompare(left, right, compare, false);
Calin Juravleddb7df22014-11-25 20:56:51 +00004362 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08004363 less_cond = kBelow; // for CF (floats).
Calin Juravleddb7df22014-11-25 20:56:51 +00004364 break;
4365 }
4366 case Primitive::kPrimDouble: {
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004367 GenerateFPCompare(left, right, compare, true);
Calin Juravleddb7df22014-11-25 20:56:51 +00004368 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08004369 less_cond = kBelow; // for CF (floats).
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004370 break;
4371 }
4372 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00004373 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004374 }
Aart Bika19616e2016-02-01 18:57:58 -08004375
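  // Materialize the three-way result: 0 when the operands compared equal,
  // 1 when left > right (fall through to `greater`), -1 when left < right.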
Calin Juravleddb7df22014-11-25 20:56:51 +00004376 __ movl(out, Immediate(0));
4377 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08004378 __ j(less_cond, &less);
Calin Juravleddb7df22014-11-25 20:56:51 +00004379
4380 __ Bind(&greater);
4381 __ movl(out, Immediate(1));
4382 __ jmp(&done);
4383
4384 __ Bind(&less);
4385 __ movl(out, Immediate(-1));
4386
4387 __ Bind(&done);
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004388}
4389
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004390void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004391 LocationSummary* locations =
4392 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004393 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01004394 locations->SetInAt(i, Location::Any());
4395 }
4396 locations->SetOut(Location::Any());
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004397}
4398
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004399void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01004400 LOG(FATAL) << "Unreachable";
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004401}
4402
Roland Levillain7c1559a2015-12-15 10:55:36 +00004403void CodeGeneratorX86::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004404 /*
4405 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
4406 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86 memory model.
4407 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4408 */
4409 switch (kind) {
4410 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004411 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004412 break;
4413 }
4414 case MemBarrierKind::kAnyStore:
4415 case MemBarrierKind::kLoadAny:
4416 case MemBarrierKind::kStoreStore: {
4417 // nop
4418 break;
4419 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004420 case MemBarrierKind::kNTStoreStore:
4421 // Non-Temporal Store/Store needs an explicit fence.
4422 MemoryFence(/* non-temporal */ true);
4423 break;
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004424 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004425}
4426
Vladimir Markodc151b22015-10-15 18:02:30 +01004427HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86::GetSupportedInvokeStaticOrDirectDispatch(
4428 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004429 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Nicolas Geoffray15bd2282016-01-05 15:55:41 +00004430 HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
4431
4432 // We disable pc-relative load when there is an irreducible loop, as the optimization
4433 // is incompatible with it.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004434 // TODO: Create as many X86ComputeBaseMethodAddress instructions
4435 // as needed for methods with irreducible loops.
Nicolas Geoffray15bd2282016-01-05 15:55:41 +00004436 if (GetGraph()->HasIrreducibleLoops() &&
4437 (dispatch_info.method_load_kind ==
4438 HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative)) {
4439 dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod;
4440 }
4441 switch (dispatch_info.code_ptr_location) {
Vladimir Markodc151b22015-10-15 18:02:30 +01004442 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
4443 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
4444 // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
4445 // (Though the direct CALL ptr16:32 is available for consideration).
4446 return HInvokeStaticOrDirect::DispatchInfo {
Nicolas Geoffray15bd2282016-01-05 15:55:41 +00004447 dispatch_info.method_load_kind,
Vladimir Markodc151b22015-10-15 18:02:30 +01004448 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
Nicolas Geoffray15bd2282016-01-05 15:55:41 +00004449 dispatch_info.method_load_data,
Vladimir Markodc151b22015-10-15 18:02:30 +01004450 0u
4451 };
4452 default:
Nicolas Geoffray15bd2282016-01-05 15:55:41 +00004453 return dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01004454 }
4455}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004456
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004457Register CodeGeneratorX86::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
4458 Register temp) {
4459 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
Vladimir Markoc53c0792015-11-19 15:48:33 +00004460 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004461 if (!invoke->GetLocations()->Intrinsified()) {
4462 return location.AsRegister<Register>();
4463 }
4464 // For intrinsics we allow any location, so it may be on the stack.
4465 if (!location.IsRegister()) {
4466 __ movl(temp, Address(ESP, location.GetStackIndex()));
4467 return temp;
4468 }
4469 // For register locations, check if the register was saved. If so, get it from the stack.
4470 // Note: There is a chance that the register was saved but not overwritten, so we could
4471 // save one load. However, since this is just an intrinsic slow path we prefer this
4472 // simple and more robust approach rather than trying to determine if that's the case.
4473 SlowPathCode* slow_path = GetCurrentSlowPath();
Serguei Katkov288c7a82016-05-16 11:53:15 +06004474 if (slow_path != nullptr) {
4475 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
4476 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
4477 __ movl(temp, Address(ESP, stack_offset));
4478 return temp;
4479 }
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004480 }
4481 return location.AsRegister<Register>();
4482}
4483
Serguei Katkov288c7a82016-05-16 11:53:15 +06004484Location CodeGeneratorX86::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
4485 Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00004486 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4487 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004488 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +00004489 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004490 uint32_t offset =
4491 GetThreadOffset<kX86PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
4492 __ fs()->movl(temp.AsRegister<Register>(), Address::Absolute(offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004493 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004494 }
Vladimir Marko58155012015-08-19 12:49:41 +00004495 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004496 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004497 break;
4498 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
4499 __ movl(temp.AsRegister<Register>(), Immediate(invoke->GetMethodAddress()));
4500 break;
4501 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004502 __ movl(temp.AsRegister<Register>(), Immediate(/* placeholder */ 0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00004503 method_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
4504 invoke->GetTargetMethod().dex_method_index);
Vladimir Marko58155012015-08-19 12:49:41 +00004505 __ Bind(&method_patches_.back().label); // Bind the label at the end of the "movl" insn.
4506 break;
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004507 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
4508 Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
4509 temp.AsRegister<Register>());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004510 __ movl(temp.AsRegister<Register>(), Address(base_reg, kDummy32BitOffset));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004511 // Bind a new fixup label at the end of the "movl" insn.
4512 uint32_t offset = invoke->GetDexCacheArrayOffset();
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004513 __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004514 break;
4515 }
Vladimir Marko58155012015-08-19 12:49:41 +00004516 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00004517 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004518 Register method_reg;
4519 Register reg = temp.AsRegister<Register>();
4520 if (current_method.IsRegister()) {
4521 method_reg = current_method.AsRegister<Register>();
4522 } else {
David Brazdil58282f42016-01-14 12:45:10 +00004523 DCHECK(invoke->GetLocations()->Intrinsified());
Vladimir Marko58155012015-08-19 12:49:41 +00004524 DCHECK(!current_method.IsValid());
4525 method_reg = reg;
4526 __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
4527 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004528 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01004529 __ movl(reg, Address(method_reg,
4530 ArtMethod::DexCacheResolvedMethodsOffset(kX86PointerSize).Int32Value()));
Vladimir Marko40ecb122016-04-06 17:33:41 +01004531 // temp = temp[index_in_cache];
4532 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
4533 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00004534 __ movl(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
4535 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01004536 }
Vladimir Marko58155012015-08-19 12:49:41 +00004537 }
Serguei Katkov288c7a82016-05-16 11:53:15 +06004538 return callee_method;
4539}
4540
4541void CodeGeneratorX86::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
4542 Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);
Vladimir Marko58155012015-08-19 12:49:41 +00004543
4544 switch (invoke->GetCodePtrLocation()) {
4545 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
4546 __ call(GetFrameEntryLabel());
4547 break;
4548 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
Vladimir Markoaad75c62016-10-03 08:46:48 +00004549 relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
4550 invoke->GetTargetMethod().dex_method_index);
Vladimir Marko58155012015-08-19 12:49:41 +00004551 Label* label = &relative_call_patches_.back().label;
4552 __ call(label); // Bind to the patch label, override at link time.
4553 __ Bind(label); // Bind the label at the end of the "call" insn.
4554 break;
4555 }
4556 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
4557 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
Vladimir Markodc151b22015-10-15 18:02:30 +01004558 // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
4559 LOG(FATAL) << "Unsupported";
4560 UNREACHABLE();
Vladimir Marko58155012015-08-19 12:49:41 +00004561 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4562 // (callee_method + offset_of_quick_compiled_code)()
4563 __ call(Address(callee_method.AsRegister<Register>(),
4564 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07004565 kX86PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00004566 break;
Mark Mendell09ed1a32015-03-25 08:30:06 -04004567 }
4568
4569 DCHECK(!IsLeafMethod());
Mark Mendell09ed1a32015-03-25 08:30:06 -04004570}
4571
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004572void CodeGeneratorX86::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
4573 Register temp = temp_in.AsRegister<Register>();
4574 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4575 invoke->GetVTableIndex(), kX86PointerSize).Uint32Value();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004576
4577 // Use the calling convention instead of the location of the receiver, as
4578 // intrinsics may have put the receiver in a different register. In the intrinsics
4579 // slow path, the arguments have been moved to the right place, so here we are
4580 // guaranteed that the receiver is the first register of the calling convention.
4581 InvokeDexCallingConvention calling_convention;
4582 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004583 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004584 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004585 __ movl(temp, Address(receiver, class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004586 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004587 // Instead of simply (possibly) unpoisoning `temp` here, we should
4588 // emit a read barrier for the previous class reference load.
4589 // However this is not required in practice, as this is an
4590 // intermediate/temporary reference and because the current
4591 // concurrent copying collector keeps the from-space memory
4592 // intact/accessible until the end of the marking phase (the
4593 // concurrent copying collector may not do so in the future).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004594 __ MaybeUnpoisonHeapReference(temp);
4595 // temp = temp->GetMethodAt(method_offset);
4596 __ movl(temp, Address(temp, method_offset));
4597 // call temp->GetEntryPoint();
4598 __ call(Address(
Andreas Gampe542451c2016-07-26 09:02:02 -07004599 temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004600}
4601
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004602void CodeGeneratorX86::RecordSimplePatch() {
4603 if (GetCompilerOptions().GetIncludePatchInformation()) {
4604 simple_patches_.emplace_back();
4605 __ Bind(&simple_patches_.back());
4606 }
4607}
4608
Vladimir Markoaad75c62016-10-03 08:46:48 +00004609void CodeGeneratorX86::RecordBootStringPatch(HLoadString* load_string) {
4610 DCHECK(GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004611 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
4612 __ Bind(&string_patches_.back().label);
4613}
4614
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004615void CodeGeneratorX86::RecordTypePatch(HLoadClass* load_class) {
4616 type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex());
4617 __ Bind(&type_patches_.back().label);
4618}
4619
Vladimir Markoaad75c62016-10-03 08:46:48 +00004620Label* CodeGeneratorX86::NewStringBssEntryPatch(HLoadString* load_string) {
4621 DCHECK(!GetCompilerOptions().IsBootImage());
4622 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex());
4623 return &string_patches_.back().label;
4624}
4625
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004626Label* CodeGeneratorX86::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
4627 uint32_t element_offset) {
4628 // Add the patch entry and bind its label at the end of the instruction.
4629 pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
4630 return &pc_relative_dex_cache_patches_.back().label;
4631}
4632
Vladimir Markoaad75c62016-10-03 08:46:48 +00004633// The label points to the end of the "movl" or another instruction but the literal offset
4634 // for a method patch needs to point to the embedded constant, which occupies the last 4 bytes.
4635constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
4636
4637template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
4638inline void CodeGeneratorX86::EmitPcRelativeLinkerPatches(
4639 const ArenaDeque<PatchInfo<Label>>& infos,
4640 ArenaVector<LinkerPatch>* linker_patches) {
4641 for (const PatchInfo<Label>& info : infos) {
4642 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
4643 linker_patches->push_back(
4644 Factory(literal_offset, &info.dex_file, GetMethodAddressOffset(), info.index));
4645 }
4646}
4647
Vladimir Marko58155012015-08-19 12:49:41 +00004648void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
4649 DCHECK(linker_patches->empty());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004650 size_t size =
4651 method_patches_.size() +
4652 relative_call_patches_.size() +
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004653 pc_relative_dex_cache_patches_.size() +
4654 simple_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004655 string_patches_.size() +
4656 type_patches_.size();
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004657 linker_patches->reserve(size);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004658 for (const PatchInfo<Label>& info : method_patches_) {
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004659 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Markoaad75c62016-10-03 08:46:48 +00004660 linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset, &info.dex_file, info.index));
Vladimir Marko58155012015-08-19 12:49:41 +00004661 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004662 for (const PatchInfo<Label>& info : relative_call_patches_) {
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004663 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Markoaad75c62016-10-03 08:46:48 +00004664 linker_patches->push_back(
4665 LinkerPatch::RelativeCodePatch(literal_offset, &info.dex_file, info.index));
Vladimir Marko58155012015-08-19 12:49:41 +00004666 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004667 EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
4668 linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004669 for (const Label& label : simple_patches_) {
4670 uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
4671 linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
4672 }
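  // string_patches_ holds .bss entry patches when not compiling the boot image
  // and boot image string patches otherwise, so the matching LinkerPatch kind
  // (and PIC vs. non-PIC for the boot image) is selected below.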
Vladimir Markoaad75c62016-10-03 08:46:48 +00004673 if (!GetCompilerOptions().IsBootImage()) {
4674 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
4675 } else if (GetCompilerOptions().GetCompilePic()) {
4676 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004677 } else {
Vladimir Markoaad75c62016-10-03 08:46:48 +00004678 for (const PatchInfo<Label>& info : string_patches_) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004679 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Markoaad75c62016-10-03 08:46:48 +00004680 linker_patches->push_back(
4681 LinkerPatch::StringPatch(literal_offset, &info.dex_file, info.index));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004682 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004683 }
4684 if (GetCompilerOptions().GetCompilePic()) {
4685 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(type_patches_, linker_patches);
4686 } else {
4687 for (const PatchInfo<Label>& info : type_patches_) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004688 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Markoaad75c62016-10-03 08:46:48 +00004689 linker_patches->push_back(LinkerPatch::TypePatch(literal_offset, &info.dex_file, info.index));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004690 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004691 }
Vladimir Marko58155012015-08-19 12:49:41 +00004692}
4693
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004694void CodeGeneratorX86::MarkGCCard(Register temp,
4695 Register card,
4696 Register object,
4697 Register value,
4698 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004699 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004700 if (value_can_be_null) {
4701 __ testl(value, value);
4702 __ j(kEqual, &is_null);
4703 }
Andreas Gampe542451c2016-07-26 09:02:02 -07004704 __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86PointerSize>().Int32Value()));
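  // `card` now holds the card table base read from the thread; the card for
  // `object` lives at that base plus (object >> kCardShift). Storing the low
  // byte of `card` itself marks the card dirty, as the card table base is
  // biased so that its least significant byte equals the dirty-card value
  // (see gc/accounting/card_table.h).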
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004705 __ movl(temp, object);
4706 __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00004707 __ movb(Address(temp, card, TIMES_1, 0),
4708 X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004709 if (value_can_be_null) {
4710 __ Bind(&is_null);
4711 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004712}
4713
Calin Juravle52c48962014-12-16 17:02:57 +00004714void LocationsBuilderX86::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
4715 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain0d5a2812015-11-13 10:07:31 +00004716
4717 bool object_field_get_with_read_barrier =
4718 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004719 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004720 new (GetGraph()->GetArena()) LocationSummary(instruction,
4721 kEmitCompilerReadBarrier ?
4722 LocationSummary::kCallOnSlowPath :
4723 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004724 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004725 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004726 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004727 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004728
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004729 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4730 locations->SetOut(Location::RequiresFpuRegister());
4731 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004732 // The output overlaps in case of long: we don't want the low move
4733 // to overwrite the object's location. Likewise, in the case of
4734 // an object field get with read barriers enabled, we do not want
4735 // the move to overwrite the object's location, as we need it to emit
4736 // the read barrier.
4737 locations->SetOut(
4738 Location::RequiresRegister(),
4739 (object_field_get_with_read_barrier || instruction->GetType() == Primitive::kPrimLong) ?
4740 Location::kOutputOverlap :
4741 Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004742 }
Calin Juravle52c48962014-12-16 17:02:57 +00004743
4744 if (field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong)) {
4745 // Long values can be loaded atomically into an XMM using movsd.
Roland Levillain7c1559a2015-12-15 10:55:36 +00004746 // So we use an XMM register as a temp to achieve atomicity (first
4747 // load the value into the XMM temp and then copy the XMM into the
4748 // output, 32 bits at a time).
Calin Juravle52c48962014-12-16 17:02:57 +00004749 locations->AddTemp(Location::RequiresFpuRegister());
4750 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004751}
4752
Calin Juravle52c48962014-12-16 17:02:57 +00004753void InstructionCodeGeneratorX86::HandleFieldGet(HInstruction* instruction,
4754 const FieldInfo& field_info) {
4755 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004756
Calin Juravle52c48962014-12-16 17:02:57 +00004757 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004758 Location base_loc = locations->InAt(0);
4759 Register base = base_loc.AsRegister<Register>();
Calin Juravle52c48962014-12-16 17:02:57 +00004760 Location out = locations->Out();
4761 bool is_volatile = field_info.IsVolatile();
4762 Primitive::Type field_type = field_info.GetFieldType();
4763 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4764
4765 switch (field_type) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004766 case Primitive::kPrimBoolean: {
Calin Juravle52c48962014-12-16 17:02:57 +00004767 __ movzxb(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004768 break;
4769 }
4770
4771 case Primitive::kPrimByte: {
Calin Juravle52c48962014-12-16 17:02:57 +00004772 __ movsxb(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004773 break;
4774 }
4775
4776 case Primitive::kPrimShort: {
Calin Juravle52c48962014-12-16 17:02:57 +00004777 __ movsxw(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004778 break;
4779 }
4780
4781 case Primitive::kPrimChar: {
Calin Juravle52c48962014-12-16 17:02:57 +00004782 __ movzxw(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004783 break;
4784 }
4785
4786 case Primitive::kPrimInt:
Calin Juravle52c48962014-12-16 17:02:57 +00004787 __ movl(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004788 break;
Roland Levillain7c1559a2015-12-15 10:55:36 +00004789
4790 case Primitive::kPrimNot: {
4791 // /* HeapReference<Object> */ out = *(base + offset)
4792 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00004793 // Note that a potential implicit null check is handled in this
4794 // CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier call.
4795 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00004796 instruction, out, base, offset, /* needs_null_check */ true);
Roland Levillain7c1559a2015-12-15 10:55:36 +00004797 if (is_volatile) {
4798 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4799 }
4800 } else {
4801 __ movl(out.AsRegister<Register>(), Address(base, offset));
4802 codegen_->MaybeRecordImplicitNullCheck(instruction);
4803 if (is_volatile) {
4804 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4805 }
4806 // If read barriers are enabled, emit read barriers other than
4807 // Baker's using a slow path (and also unpoison the loaded
4808 // reference, if heap poisoning is enabled).
4809 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4810 }
4811 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004812 }
4813
4814 case Primitive::kPrimLong: {
Calin Juravle52c48962014-12-16 17:02:57 +00004815 if (is_volatile) {
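        // Load the 64-bit field with a single (atomic) movsd into the XMM temp,
        // then split it into the output register pair: movd extracts the low
        // dword and psrlq shifts the high dword down for the second movd.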
4816 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
4817 __ movsd(temp, Address(base, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004818 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004819 __ movd(out.AsRegisterPairLow<Register>(), temp);
4820 __ psrlq(temp, Immediate(32));
4821 __ movd(out.AsRegisterPairHigh<Register>(), temp);
4822 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004823 DCHECK_NE(base, out.AsRegisterPairLow<Register>());
Calin Juravle52c48962014-12-16 17:02:57 +00004824 __ movl(out.AsRegisterPairLow<Register>(), Address(base, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004825 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004826 __ movl(out.AsRegisterPairHigh<Register>(), Address(base, kX86WordSize + offset));
4827 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004828 break;
4829 }
4830
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004831 case Primitive::kPrimFloat: {
Calin Juravle52c48962014-12-16 17:02:57 +00004832 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004833 break;
4834 }
4835
4836 case Primitive::kPrimDouble: {
Calin Juravle52c48962014-12-16 17:02:57 +00004837 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004838 break;
4839 }
4840
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004841 case Primitive::kPrimVoid:
Calin Juravle52c48962014-12-16 17:02:57 +00004842 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004843 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004844 }
Calin Juravle52c48962014-12-16 17:02:57 +00004845
Roland Levillain7c1559a2015-12-15 10:55:36 +00004846 if (field_type == Primitive::kPrimNot || field_type == Primitive::kPrimLong) {
4847 // Potential implicit null checks, in the case of reference or
4848 // long fields, are handled in the previous switch statement.
4849 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00004850 codegen_->MaybeRecordImplicitNullCheck(instruction);
4851 }
4852
Calin Juravle52c48962014-12-16 17:02:57 +00004853 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00004854 if (field_type == Primitive::kPrimNot) {
4855 // Memory barriers, in the case of references, are also handled
4856 // in the previous switch statement.
4857 } else {
4858 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4859 }
Roland Levillain4d027112015-07-01 15:41:14 +01004860 }
Calin Juravle52c48962014-12-16 17:02:57 +00004861}
4862
4863void LocationsBuilderX86::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
4864 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4865
4866 LocationSummary* locations =
4867 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4868 locations->SetInAt(0, Location::RequiresRegister());
4869 bool is_volatile = field_info.IsVolatile();
4870 Primitive::Type field_type = field_info.GetFieldType();
4871 bool is_byte_type = (field_type == Primitive::kPrimBoolean)
4872 || (field_type == Primitive::kPrimByte);
4873
4874 // The register allocator does not support multiple
4875 // inputs that die at entry with one in a specific register.
4876 if (is_byte_type) {
4877 // Ensure the value is in a byte register.
4878 locations->SetInAt(1, Location::RegisterLocation(EAX));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004879 } else if (Primitive::IsFloatingPointType(field_type)) {
Mark Mendell81489372015-11-04 11:30:41 -05004880 if (is_volatile && field_type == Primitive::kPrimDouble) {
4881 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4882 locations->SetInAt(1, Location::RequiresFpuRegister());
4883 } else {
4884 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4885 }
4886 } else if (is_volatile && field_type == Primitive::kPrimLong) {
4887 // In order to satisfy the semantics of volatile, this must be a single instruction store.
Calin Juravle52c48962014-12-16 17:02:57 +00004888 locations->SetInAt(1, Location::RequiresRegister());
Mark Mendell81489372015-11-04 11:30:41 -05004889
Calin Juravle52c48962014-12-16 17:02:57 +00004890 // 64bits value can be atomically written to an address with movsd and an XMM register.
4891 // We need two XMM registers because there's no easier way to (bit) copy a register pair
4892 // into a single XMM register (we copy each pair part into the XMMs and then interleave them).
4893 // NB: We could make the register allocator understand fp_reg <-> core_reg moves but given the
4894 // isolated cases when we need this it isn't worth adding the extra complexity.
4895 locations->AddTemp(Location::RequiresFpuRegister());
4896 locations->AddTemp(Location::RequiresFpuRegister());
Mark Mendell81489372015-11-04 11:30:41 -05004897 } else {
4898 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4899
4900 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4901 // Temporary registers for the write barrier.
4902 locations->AddTemp(Location::RequiresRegister()); // May be used for reference poisoning too.
4903 // Ensure the card is in a byte register.
4904 locations->AddTemp(Location::RegisterLocation(ECX));
4905 }
Calin Juravle52c48962014-12-16 17:02:57 +00004906 }
4907}
4908
4909void InstructionCodeGeneratorX86::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004910 const FieldInfo& field_info,
4911 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004912 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4913
4914 LocationSummary* locations = instruction->GetLocations();
4915 Register base = locations->InAt(0).AsRegister<Register>();
4916 Location value = locations->InAt(1);
4917 bool is_volatile = field_info.IsVolatile();
4918 Primitive::Type field_type = field_info.GetFieldType();
4919 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01004920 bool needs_write_barrier =
4921 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004922
4923 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00004924 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004925 }
4926
Mark Mendell81489372015-11-04 11:30:41 -05004927 bool maybe_record_implicit_null_check_done = false;
4928
Calin Juravle52c48962014-12-16 17:02:57 +00004929 switch (field_type) {
4930 case Primitive::kPrimBoolean:
4931 case Primitive::kPrimByte: {
4932 __ movb(Address(base, offset), value.AsRegister<ByteRegister>());
4933 break;
4934 }
4935
4936 case Primitive::kPrimShort:
4937 case Primitive::kPrimChar: {
Mark Mendell81489372015-11-04 11:30:41 -05004938 if (value.IsConstant()) {
4939 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4940 __ movw(Address(base, offset), Immediate(v));
4941 } else {
4942 __ movw(Address(base, offset), value.AsRegister<Register>());
4943 }
Calin Juravle52c48962014-12-16 17:02:57 +00004944 break;
4945 }
4946
4947 case Primitive::kPrimInt:
4948 case Primitive::kPrimNot: {
Roland Levillain4d027112015-07-01 15:41:14 +01004949 if (kPoisonHeapReferences && needs_write_barrier) {
4950 // Note that in the case where `value` is a null reference,
4951 // we do not enter this block, as the reference does not
4952 // need poisoning.
4953 DCHECK_EQ(field_type, Primitive::kPrimNot);
4954 Register temp = locations->GetTemp(0).AsRegister<Register>();
4955 __ movl(temp, value.AsRegister<Register>());
4956 __ PoisonHeapReference(temp);
4957 __ movl(Address(base, offset), temp);
Mark Mendell81489372015-11-04 11:30:41 -05004958 } else if (value.IsConstant()) {
4959 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4960 __ movl(Address(base, offset), Immediate(v));
Roland Levillain4d027112015-07-01 15:41:14 +01004961 } else {
Nicolas Geoffray03971632016-03-17 10:44:24 +00004962 DCHECK(value.IsRegister()) << value;
Roland Levillain4d027112015-07-01 15:41:14 +01004963 __ movl(Address(base, offset), value.AsRegister<Register>());
4964 }
Calin Juravle52c48962014-12-16 17:02:57 +00004965 break;
4966 }
4967
4968 case Primitive::kPrimLong: {
4969 if (is_volatile) {
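        // Pack the two 32-bit halves of the value into one XMM register
        // (punpckldq interleaves the low dwords of temp1 and temp2) so that
        // the 64-bit store below is a single, atomic movsd.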
4970 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
4971 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
4972 __ movd(temp1, value.AsRegisterPairLow<Register>());
4973 __ movd(temp2, value.AsRegisterPairHigh<Register>());
4974 __ punpckldq(temp1, temp2);
4975 __ movsd(Address(base, offset), temp1);
Calin Juravle77520bc2015-01-12 18:45:46 +00004976 codegen_->MaybeRecordImplicitNullCheck(instruction);
Mark Mendell81489372015-11-04 11:30:41 -05004977 } else if (value.IsConstant()) {
4978 int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
4979 __ movl(Address(base, offset), Immediate(Low32Bits(v)));
4980 codegen_->MaybeRecordImplicitNullCheck(instruction);
4981 __ movl(Address(base, kX86WordSize + offset), Immediate(High32Bits(v)));
Calin Juravle52c48962014-12-16 17:02:57 +00004982 } else {
4983 __ movl(Address(base, offset), value.AsRegisterPairLow<Register>());
Calin Juravle77520bc2015-01-12 18:45:46 +00004984 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004985 __ movl(Address(base, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
4986 }
Mark Mendell81489372015-11-04 11:30:41 -05004987 maybe_record_implicit_null_check_done = true;
Calin Juravle52c48962014-12-16 17:02:57 +00004988 break;
4989 }
4990
4991 case Primitive::kPrimFloat: {
Mark Mendell81489372015-11-04 11:30:41 -05004992 if (value.IsConstant()) {
4993 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4994 __ movl(Address(base, offset), Immediate(v));
4995 } else {
4996 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4997 }
Calin Juravle52c48962014-12-16 17:02:57 +00004998 break;
4999 }
5000
5001 case Primitive::kPrimDouble: {
Mark Mendell81489372015-11-04 11:30:41 -05005002 if (value.IsConstant()) {
5003 int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
5004 __ movl(Address(base, offset), Immediate(Low32Bits(v)));
5005 codegen_->MaybeRecordImplicitNullCheck(instruction);
5006 __ movl(Address(base, kX86WordSize + offset), Immediate(High32Bits(v)));
5007 maybe_record_implicit_null_check_done = true;
5008 } else {
5009 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
5010 }
Calin Juravle52c48962014-12-16 17:02:57 +00005011 break;
5012 }
5013
5014 case Primitive::kPrimVoid:
5015 LOG(FATAL) << "Unreachable type " << field_type;
5016 UNREACHABLE();
5017 }
5018
Mark Mendell81489372015-11-04 11:30:41 -05005019 if (!maybe_record_implicit_null_check_done) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005020 codegen_->MaybeRecordImplicitNullCheck(instruction);
5021 }
5022
Roland Levillain4d027112015-07-01 15:41:14 +01005023 if (needs_write_barrier) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005024 Register temp = locations->GetTemp(0).AsRegister<Register>();
5025 Register card = locations->GetTemp(1).AsRegister<Register>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005026 codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00005027 }
5028
Calin Juravle52c48962014-12-16 17:02:57 +00005029 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005030 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00005031 }
5032}
5033
5034void LocationsBuilderX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5035 HandleFieldGet(instruction, instruction->GetFieldInfo());
5036}
5037
5038void InstructionCodeGeneratorX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5039 HandleFieldGet(instruction, instruction->GetFieldInfo());
5040}
5041
5042void LocationsBuilderX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
5043 HandleFieldSet(instruction, instruction->GetFieldInfo());
5044}
5045
5046void InstructionCodeGeneratorX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005047 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005048}
5049
5050void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
5051 HandleFieldSet(instruction, instruction->GetFieldInfo());
5052}
5053
5054void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005055 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005056}
5057
5058void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5059 HandleFieldGet(instruction, instruction->GetFieldInfo());
5060}
5061
5062void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5063 HandleFieldGet(instruction, instruction->GetFieldInfo());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005064}
5065
Calin Juravlee460d1d2015-09-29 04:52:17 +01005066void LocationsBuilderX86::VisitUnresolvedInstanceFieldGet(
5067 HUnresolvedInstanceFieldGet* instruction) {
5068 FieldAccessCallingConventionX86 calling_convention;
5069 codegen_->CreateUnresolvedFieldLocationSummary(
5070 instruction, instruction->GetFieldType(), calling_convention);
5071}
5072
5073void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldGet(
5074 HUnresolvedInstanceFieldGet* instruction) {
5075 FieldAccessCallingConventionX86 calling_convention;
5076 codegen_->GenerateUnresolvedFieldAccess(instruction,
5077 instruction->GetFieldType(),
5078 instruction->GetFieldIndex(),
5079 instruction->GetDexPc(),
5080 calling_convention);
5081}
5082
5083void LocationsBuilderX86::VisitUnresolvedInstanceFieldSet(
5084 HUnresolvedInstanceFieldSet* instruction) {
5085 FieldAccessCallingConventionX86 calling_convention;
5086 codegen_->CreateUnresolvedFieldLocationSummary(
5087 instruction, instruction->GetFieldType(), calling_convention);
5088}
5089
5090void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldSet(
5091 HUnresolvedInstanceFieldSet* instruction) {
5092 FieldAccessCallingConventionX86 calling_convention;
5093 codegen_->GenerateUnresolvedFieldAccess(instruction,
5094 instruction->GetFieldType(),
5095 instruction->GetFieldIndex(),
5096 instruction->GetDexPc(),
5097 calling_convention);
5098}
5099
5100void LocationsBuilderX86::VisitUnresolvedStaticFieldGet(
5101 HUnresolvedStaticFieldGet* instruction) {
5102 FieldAccessCallingConventionX86 calling_convention;
5103 codegen_->CreateUnresolvedFieldLocationSummary(
5104 instruction, instruction->GetFieldType(), calling_convention);
5105}
5106
5107void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldGet(
5108 HUnresolvedStaticFieldGet* instruction) {
5109 FieldAccessCallingConventionX86 calling_convention;
5110 codegen_->GenerateUnresolvedFieldAccess(instruction,
5111 instruction->GetFieldType(),
5112 instruction->GetFieldIndex(),
5113 instruction->GetDexPc(),
5114 calling_convention);
5115}
5116
5117void LocationsBuilderX86::VisitUnresolvedStaticFieldSet(
5118 HUnresolvedStaticFieldSet* instruction) {
5119 FieldAccessCallingConventionX86 calling_convention;
5120 codegen_->CreateUnresolvedFieldLocationSummary(
5121 instruction, instruction->GetFieldType(), calling_convention);
5122}
5123
5124void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldSet(
5125 HUnresolvedStaticFieldSet* instruction) {
5126 FieldAccessCallingConventionX86 calling_convention;
5127 codegen_->GenerateUnresolvedFieldAccess(instruction,
5128 instruction->GetFieldType(),
5129 instruction->GetFieldIndex(),
5130 instruction->GetDexPc(),
5131 calling_convention);
5132}
5133
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005134void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005135 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5136 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
5137 ? Location::RequiresRegister()
5138 : Location::Any();
5139 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005140}
5141
Calin Juravle2ae48182016-03-16 14:05:09 +00005142void CodeGeneratorX86::GenerateImplicitNullCheck(HNullCheck* instruction) {
5143 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005144 return;
5145 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005146 LocationSummary* locations = instruction->GetLocations();
5147 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005148
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005149 __ testl(EAX, Address(obj.AsRegister<Register>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00005150 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005151}
5152
Calin Juravle2ae48182016-03-16 14:05:09 +00005153void CodeGeneratorX86::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07005154 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005155 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005156
5157 LocationSummary* locations = instruction->GetLocations();
5158 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005159
5160 if (obj.IsRegister()) {
Mark Mendell42514f62015-03-31 11:34:22 -04005161 __ testl(obj.AsRegister<Register>(), obj.AsRegister<Register>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005162 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005163 __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005164 } else {
5165 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00005166 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005167 __ jmp(slow_path->GetEntryLabel());
5168 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005169 }
5170 __ j(kEqual, slow_path->GetEntryLabel());
5171}
5172
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005173void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005174 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005175}
5176
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005177void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005178 bool object_array_get_with_read_barrier =
5179 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005180 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00005181 new (GetGraph()->GetArena()) LocationSummary(instruction,
5182 object_array_get_with_read_barrier ?
5183 LocationSummary::kCallOnSlowPath :
5184 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005185 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005186 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005187 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005188 locations->SetInAt(0, Location::RequiresRegister());
5189 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005190 if (Primitive::IsFloatingPointType(instruction->GetType())) {
5191 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5192 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005193 // The output overlaps in case of long: we don't want the low move
5194 // to overwrite the array's location. Likewise, in the case of an
5195 // object array get with read barriers enabled, we do not want the
5196 // move to overwrite the array's location, as we need it to emit
5197 // the read barrier.
5198 locations->SetOut(
5199 Location::RequiresRegister(),
5200 (instruction->GetType() == Primitive::kPrimLong || object_array_get_with_read_barrier) ?
5201 Location::kOutputOverlap :
5202 Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005203 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005204}
5205
5206void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
5207 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005208 Location obj_loc = locations->InAt(0);
5209 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005210 Location index = locations->InAt(1);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005211 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01005212 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005213
Calin Juravle77520bc2015-01-12 18:45:46 +00005214 Primitive::Type type = instruction->GetType();
5215 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005216 case Primitive::kPrimBoolean: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005217 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005218 __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005219 break;
5220 }
5221
5222 case Primitive::kPrimByte: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005223 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005224 __ movsxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005225 break;
5226 }
5227
5228 case Primitive::kPrimShort: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005229 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005230 __ movsxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005231 break;
5232 }
5233
5234 case Primitive::kPrimChar: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005235 Register out = out_loc.AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07005236 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
5237 // Branch cases into compressed and uncompressed for each index's type.
5238 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
5239 NearLabel done, not_compressed;
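        // The sign bit of the count field doubles as the compression flag:
        // a negative count means the chars are stored compressed as 8-bit
        // values, otherwise they are regular 16-bit values.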
5240 __ cmpl(Address(obj, count_offset), Immediate(0));
5241 codegen_->MaybeRecordImplicitNullCheck(instruction);
5242 __ j(kGreaterEqual, &not_compressed);
5243 __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
5244 __ jmp(&done);
5245 __ Bind(&not_compressed);
5246 __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5247 __ Bind(&done);
5248 } else {
5249 // Common case for charAt of array of char or when the string compression
5250 // feature is turned off.
5251 __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5252 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005253 break;
5254 }
5255
Roland Levillain7c1559a2015-12-15 10:55:36 +00005256 case Primitive::kPrimInt: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005257 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005258 __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005259 break;
5260 }
5261
Roland Levillain7c1559a2015-12-15 10:55:36 +00005262 case Primitive::kPrimNot: {
5263 static_assert(
5264 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5265 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00005266 // /* HeapReference<Object> */ out =
5267 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
5268 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005269 // Note that a potential implicit null check is handled in this
5270 // CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier call.
5271 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00005272 instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005273 } else {
5274 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005275 __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
5276 codegen_->MaybeRecordImplicitNullCheck(instruction);
5277 // If read barriers are enabled, emit read barriers other than
5278 // Baker's using a slow path (and also unpoison the loaded
5279 // reference, if heap poisoning is enabled).
Roland Levillain7c1559a2015-12-15 10:55:36 +00005280 if (index.IsConstant()) {
5281 uint32_t offset =
5282 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain7c1559a2015-12-15 10:55:36 +00005283 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
5284 } else {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005285 codegen_->MaybeGenerateReadBarrierSlow(
5286 instruction, out_loc, out_loc, obj_loc, data_offset, index);
5287 }
5288 }
5289 break;
5290 }
5291
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005292 case Primitive::kPrimLong: {
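// The low word is loaded first, so `obj` must not be allocated to the low output
// register; the overlapping output requested in the locations builder guarantees this.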
Roland Levillain7c1559a2015-12-15 10:55:36 +00005293 DCHECK_NE(obj, out_loc.AsRegisterPairLow<Register>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005294 __ movl(out_loc.AsRegisterPairLow<Register>(),
5295 CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset));
5296 codegen_->MaybeRecordImplicitNullCheck(instruction);
5297 __ movl(out_loc.AsRegisterPairHigh<Register>(),
5298 CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset + kX86WordSize));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005299 break;
5300 }
5301
Mark Mendell7c8d0092015-01-26 11:21:33 -05005302 case Primitive::kPrimFloat: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005303 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005304 __ movss(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005305 break;
5306 }
5307
5308 case Primitive::kPrimDouble: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005309 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005310 __ movsd(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005311 break;
5312 }
5313
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005314 case Primitive::kPrimVoid:
Calin Juravle77520bc2015-01-12 18:45:46 +00005315 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005316 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005317 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005318
Roland Levillain7c1559a2015-12-15 10:55:36 +00005319 if (type == Primitive::kPrimNot || type == Primitive::kPrimLong) {
5320 // Potential implicit null checks, in the case of reference or
5321 // long arrays, are handled in the previous switch statement.
5322 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00005323 codegen_->MaybeRecordImplicitNullCheck(instruction);
5324 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005325}
5326
5327void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005328 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005329
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005330 bool needs_write_barrier =
5331 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005332 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005333
Nicolas Geoffray39468442014-09-02 15:17:15 +01005334 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
5335 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01005336 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00005337 LocationSummary::kCallOnSlowPath :
5338 LocationSummary::kNoCall);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005339
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005340 bool is_byte_type = (value_type == Primitive::kPrimBoolean)
5341 || (value_type == Primitive::kPrimByte);
5342 // We need the inputs to be different than the output in case of long operation.
5343 // In case of a byte operation, the register allocator does not support multiple
5344 // inputs that die at entry with one in a specific register.
5345 locations->SetInAt(0, Location::RequiresRegister());
5346 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
5347 if (is_byte_type) {
5348 // Ensure the value is in a byte register.
5349 locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, instruction->InputAt(2)));
5350 } else if (Primitive::IsFloatingPointType(value_type)) {
Mark Mendell81489372015-11-04 11:30:41 -05005351 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005352 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005353 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
5354 }
5355 if (needs_write_barrier) {
5356 // Temporary registers for the write barrier.
5357 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
5358 // Ensure the card is in a byte register.
Roland Levillain4f6b0b52015-11-23 19:29:22 +00005359 locations->AddTemp(Location::RegisterLocation(ECX));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005360 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005361}
5362
5363void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
5364 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005365 Location array_loc = locations->InAt(0);
5366 Register array = array_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005367 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005368 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005369 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005370 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5371 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5372 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005373 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005374 bool needs_write_barrier =
5375 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005376
5377 switch (value_type) {
5378 case Primitive::kPrimBoolean:
5379 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005380 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005381 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_1, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005382 if (value.IsRegister()) {
5383 __ movb(address, value.AsRegister<ByteRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005384 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005385 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005386 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005387 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005388 break;
5389 }
5390
5391 case Primitive::kPrimShort:
5392 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005393 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005394 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_2, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005395 if (value.IsRegister()) {
5396 __ movw(address, value.AsRegister<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005397 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005398 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005399 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005400 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005401 break;
5402 }
5403
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005404 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005405 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005406 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005407
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005408 if (!value.IsRegister()) {
5409 // Just setting null.
5410 DCHECK(instruction->InputAt(2)->IsNullConstant());
5411 DCHECK(value.IsConstant()) << value;
5412 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00005413 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005414 DCHECK(!needs_write_barrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005415 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005416 break;
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005417 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005418
5419 DCHECK(needs_write_barrier);
5420 Register register_value = value.AsRegister<Register>();
Roland Levillain16d9f942016-08-25 17:27:56 +01005421 // We cannot use a NearLabel for `done`, as its range may be too
5422 // short when Baker read barriers are enabled.
5423 Label done;
5424 NearLabel not_null, do_put;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005425 SlowPathCode* slow_path = nullptr;
Roland Levillain16d9f942016-08-25 17:27:56 +01005426 Location temp_loc = locations->GetTemp(0);
5427 Register temp = temp_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005428 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005429 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86(instruction);
5430 codegen_->AddSlowPath(slow_path);
5431 if (instruction->GetValueCanBeNull()) {
5432 __ testl(register_value, register_value);
5433 __ j(kNotEqual, &not_null);
5434 __ movl(address, Immediate(0));
5435 codegen_->MaybeRecordImplicitNullCheck(instruction);
5436 __ jmp(&done);
5437 __ Bind(&not_null);
5438 }
5439
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005440 // Note that when Baker read barriers are enabled, the type
5441 // checks are performed without read barriers. This is fine,
5442 // even in the case where a class object is in the from-space
5443 // after the flip, as a comparison involving such a type would
5444 // not produce a false positive; it may of course produce a
5445 // false negative, in which case we would take the ArraySet
5446 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01005447
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005448 // /* HeapReference<Class> */ temp = array->klass_
5449 __ movl(temp, Address(array, class_offset));
5450 codegen_->MaybeRecordImplicitNullCheck(instruction);
5451 __ MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01005452
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005453 // /* HeapReference<Class> */ temp = temp->component_type_
5454 __ movl(temp, Address(temp, component_offset));
5455 // If heap poisoning is enabled, no need to unpoison `temp`
5456 // nor the object reference in `register_value->klass`, as
5457 // we are comparing two poisoned references.
5458 __ cmpl(temp, Address(register_value, class_offset));
Roland Levillain16d9f942016-08-25 17:27:56 +01005459
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005460 if (instruction->StaticTypeOfArrayIsObjectArray()) {
5461 __ j(kEqual, &do_put);
5462 // If heap poisoning is enabled, the `temp` reference has
5463 // not been unpoisoned yet; unpoison it now.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005464 __ MaybeUnpoisonHeapReference(temp);
5465
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005466 // If heap poisoning is enabled, no need to unpoison the
5467 // heap reference loaded below, as it is only used for a
5468 // comparison with null.
5469 __ cmpl(Address(temp, super_offset), Immediate(0));
5470 __ j(kNotEqual, slow_path->GetEntryLabel());
5471 __ Bind(&do_put);
5472 } else {
5473 __ j(kNotEqual, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005474 }
5475 }
5476
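// When heap poisoning is enabled, poison a copy of the value in `temp` so that
// `register_value` keeps the unpoisoned reference needed for the card mark below.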
5477 if (kPoisonHeapReferences) {
5478 __ movl(temp, register_value);
5479 __ PoisonHeapReference(temp);
5480 __ movl(address, temp);
5481 } else {
5482 __ movl(address, register_value);
5483 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005484 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005485 codegen_->MaybeRecordImplicitNullCheck(instruction);
5486 }
5487
5488 Register card = locations->GetTemp(1).AsRegister<Register>();
5489 codegen_->MarkGCCard(
5490 temp, card, array, value.AsRegister<Register>(), instruction->GetValueCanBeNull());
5491 __ Bind(&done);
5492
5493 if (slow_path != nullptr) {
5494 __ Bind(slow_path->GetExitLabel());
5495 }
5496
5497 break;
5498 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005499
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005500 case Primitive::kPrimInt: {
5501 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005502 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005503 if (value.IsRegister()) {
5504 __ movl(address, value.AsRegister<Register>());
5505 } else {
5506 DCHECK(value.IsConstant()) << value;
5507 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5508 __ movl(address, Immediate(v));
5509 }
5510 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005511 break;
5512 }
5513
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005514 case Primitive::kPrimLong: {
5515 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005516 if (value.IsRegisterPair()) {
5517 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
5518 value.AsRegisterPairLow<Register>());
5519 codegen_->MaybeRecordImplicitNullCheck(instruction);
5520 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
5521 value.AsRegisterPairHigh<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005522 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005523 DCHECK(value.IsConstant());
5524 int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
5525 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
5526 Immediate(Low32Bits(val)));
5527 codegen_->MaybeRecordImplicitNullCheck(instruction);
5528 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
5529 Immediate(High32Bits(val)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005530 }
5531 break;
5532 }
5533
Mark Mendell7c8d0092015-01-26 11:21:33 -05005534 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005535 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005536 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Mark Mendell81489372015-11-04 11:30:41 -05005537 if (value.IsFpuRegister()) {
5538 __ movss(address, value.AsFpuRegister<XmmRegister>());
5539 } else {
5540 DCHECK(value.IsConstant());
5541 int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
5542 __ movl(address, Immediate(v));
5543 }
5544 codegen_->MaybeRecordImplicitNullCheck(instruction);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005545 break;
5546 }
5547
5548 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005549 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005550 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset);
Mark Mendell81489372015-11-04 11:30:41 -05005551 if (value.IsFpuRegister()) {
5552 __ movsd(address, value.AsFpuRegister<XmmRegister>());
5553 } else {
5554 DCHECK(value.IsConstant());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005555 Address address_hi =
5556 CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset + kX86WordSize);
Mark Mendell81489372015-11-04 11:30:41 -05005557 int64_t v = bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
5558 __ movl(address, Immediate(Low32Bits(v)));
5559 codegen_->MaybeRecordImplicitNullCheck(instruction);
5560 __ movl(address_hi, Immediate(High32Bits(v)));
5561 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05005562 break;
5563 }
5564
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005565 case Primitive::kPrimVoid:
5566 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07005567 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005568 }
5569}
5570
5571void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
5572 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005573 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005574 if (!instruction->IsEmittedAtUseSite()) {
5575 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5576 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005577}
5578
5579void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005580 if (instruction->IsEmittedAtUseSite()) {
5581 return;
5582 }
5583
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005584 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005585 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005586 Register obj = locations->InAt(0).AsRegister<Register>();
5587 Register out = locations->Out().AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005588 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005589 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005590 // Mask out the most significant bit, which holds the compression flag when this is a String's length.
5591 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
5592 __ andl(out, Immediate(INT32_MAX));
5593 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005594}
5595
5596void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005597 RegisterSet caller_saves = RegisterSet::Empty();
5598 InvokeRuntimeCallingConvention calling_convention;
5599 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5600 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5601 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005602 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005603 HInstruction* length = instruction->InputAt(1);
5604 if (!length->IsEmittedAtUseSite()) {
5605 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
5606 }
jessicahandojo4877b792016-09-08 19:49:13 -07005607 // A temporary register is needed to load and mask the length of a compressed String.
5608 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
5609 locations->AddTemp(Location::RequiresRegister());
5610 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005611}
5612
5613void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
jessicahandojo4877b792016-09-08 19:49:13 -07005614 const bool is_string_compressed_char_at =
5615 mirror::kUseStringCompression && instruction->IsStringCharAt();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005616 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005617 Location index_loc = locations->InAt(0);
5618 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07005619 SlowPathCode* slow_path =
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01005620 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005621
Mark Mendell99dbd682015-04-22 16:18:52 -04005622 if (length_loc.IsConstant()) {
5623 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5624 if (index_loc.IsConstant()) {
5625 // BCE will remove the bounds check if we are guaranteed to pass.
5626 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5627 if (index < 0 || index >= length) {
5628 codegen_->AddSlowPath(slow_path);
5629 __ jmp(slow_path->GetEntryLabel());
5630 } else {
5631 // Some optimization after BCE may have generated this, and we should not
5632 // generate a bounds check if it is a valid range.
5633 }
5634 return;
5635 }
5636
5637 // The jump condition is reversed here because the operands are swapped: the index is compared against the constant length.
5638 Register index_reg = index_loc.AsRegister<Register>();
5639 __ cmpl(index_reg, Immediate(length));
5640 codegen_->AddSlowPath(slow_path);
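// kAboveEqual is an unsigned comparison, so a negative index also takes the slow path.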
5641 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005642 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04005643 HInstruction* array_length = instruction->InputAt(1);
5644 if (array_length->IsEmittedAtUseSite()) {
5645 // Address the length field in the array.
5646 DCHECK(array_length->IsArrayLength());
5647 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
5648 Location array_loc = array_length->GetLocations()->InAt(0);
5649 Address array_len(array_loc.AsRegister<Register>(), len_offset);
jessicahandojo4877b792016-09-08 19:49:13 -07005650 if (is_string_compressed_char_at) {
5651 Register length_reg = locations->GetTemp(0).AsRegister<Register>();
5652 __ movl(length_reg, array_len);
5653 codegen_->MaybeRecordImplicitNullCheck(array_length);
5654 __ andl(length_reg, Immediate(INT32_MAX));
5655 codegen_->GenerateIntCompare(length_reg, index_loc);
Mark Mendellee8d9712016-07-12 11:13:15 -04005656 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07005657 // General case: checking bounds for a char[] access, or for a String charAt
5658 // when the compression feature is disabled.
5659 if (index_loc.IsConstant()) {
5660 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5661 __ cmpl(array_len, Immediate(value));
5662 } else {
5663 __ cmpl(array_len, index_loc.AsRegister<Register>());
5664 }
5665 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendellee8d9712016-07-12 11:13:15 -04005666 }
Mark Mendell99dbd682015-04-22 16:18:52 -04005667 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005668 codegen_->GenerateIntCompare(length_loc, index_loc);
Mark Mendell99dbd682015-04-22 16:18:52 -04005669 }
5670 codegen_->AddSlowPath(slow_path);
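// Unsigned below-or-equal: fail when length <= index; a negative index, seen as unsigned, always fails.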
5671 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005672 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005673}
5674
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005675void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005676 LOG(FATAL) << "Unreachable";
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01005677}
5678
5679void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005680 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5681}
5682
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005683void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005684 LocationSummary* locations =
5685 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01005686 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005687}
5688
5689void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005690 HBasicBlock* block = instruction->GetBlock();
5691 if (block->GetLoopInformation() != nullptr) {
5692 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5693 // The back edge will generate the suspend check.
5694 return;
5695 }
5696 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5697 // The goto will generate the suspend check.
5698 return;
5699 }
5700 GenerateSuspendCheck(instruction, nullptr);
5701}
5702
5703void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
5704 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005705 SuspendCheckSlowPathX86* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005706 down_cast<SuspendCheckSlowPathX86*>(instruction->GetSlowPath());
5707 if (slow_path == nullptr) {
5708 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86(instruction, successor);
5709 instruction->SetSlowPath(slow_path);
5710 codegen_->AddSlowPath(slow_path);
5711 if (successor != nullptr) {
5712 DCHECK(successor->IsLoopHeader());
5713 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5714 }
5715 } else {
5716 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5717 }
5718
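// Any pending thread flag (e.g. a suspend request) makes the flags word non-zero
// and routes execution to the suspend-check slow path.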
Andreas Gampe542451c2016-07-26 09:02:02 -07005719 __ fs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86PointerSize>().Int32Value()),
Roland Levillain7c1559a2015-12-15 10:55:36 +00005720 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005721 if (successor == nullptr) {
5722 __ j(kNotEqual, slow_path->GetEntryLabel());
5723 __ Bind(slow_path->GetReturnLabel());
5724 } else {
5725 __ j(kEqual, codegen_->GetLabelOf(successor));
5726 __ jmp(slow_path->GetEntryLabel());
5727 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005728}
5729
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005730X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
5731 return codegen_->GetAssembler();
5732}
5733
Mark Mendell7c8d0092015-01-26 11:21:33 -05005734void ParallelMoveResolverX86::MoveMemoryToMemory32(int dst, int src) {
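// Copy a 32-bit value between two stack slots through a scratch core register.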
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005735 ScratchRegisterScope ensure_scratch(
5736 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
5737 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
5738 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
5739 __ movl(temp_reg, Address(ESP, src + stack_offset));
5740 __ movl(Address(ESP, dst + stack_offset), temp_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005741}
5742
5743void ParallelMoveResolverX86::MoveMemoryToMemory64(int dst, int src) {
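// Copy a 64-bit value between two stack slots, one 32-bit word at a time.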
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005744 ScratchRegisterScope ensure_scratch(
5745 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
5746 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
5747 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
5748 __ movl(temp_reg, Address(ESP, src + stack_offset));
5749 __ movl(Address(ESP, dst + stack_offset), temp_reg);
5750 __ movl(temp_reg, Address(ESP, src + stack_offset + kX86WordSize));
5751 __ movl(Address(ESP, dst + stack_offset + kX86WordSize), temp_reg);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005752}
5753
5754void ParallelMoveResolverX86::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005755 MoveOperands* move = moves_[index];
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005756 Location source = move->GetSource();
5757 Location destination = move->GetDestination();
5758
5759 if (source.IsRegister()) {
5760 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005761 __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
David Brazdil74eb1b22015-12-14 11:44:01 +00005762 } else if (destination.IsFpuRegister()) {
5763 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005764 } else {
5765 DCHECK(destination.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00005766 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005767 }
David Brazdil74eb1b22015-12-14 11:44:01 +00005768 } else if (source.IsRegisterPair()) {
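// The only destination handled here is an XMM register: spill both halves of the
// pair to the stack and reload them as a single 64-bit value.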
5769 size_t elem_size = Primitive::ComponentSize(Primitive::kPrimInt);
5770 // Create stack space for 2 elements.
5771 __ subl(ESP, Immediate(2 * elem_size));
5772 __ movl(Address(ESP, 0), source.AsRegisterPairLow<Register>());
5773 __ movl(Address(ESP, elem_size), source.AsRegisterPairHigh<Register>());
5774 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
5775 // And remove the temporary stack space we allocated.
5776 __ addl(ESP, Immediate(2 * elem_size));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005777 } else if (source.IsFpuRegister()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00005778 if (destination.IsRegister()) {
5779 __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
5780 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05005781 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
David Brazdil74eb1b22015-12-14 11:44:01 +00005782 } else if (destination.IsRegisterPair()) {
5783 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
5784 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
5785 __ psrlq(src_reg, Immediate(32));
5786 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005787 } else if (destination.IsStackSlot()) {
5788 __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
5789 } else {
5790 DCHECK(destination.IsDoubleStackSlot());
5791 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
5792 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005793 } else if (source.IsStackSlot()) {
5794 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005795 __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005796 } else if (destination.IsFpuRegister()) {
5797 __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005798 } else {
5799 DCHECK(destination.IsStackSlot());
Mark Mendell7c8d0092015-01-26 11:21:33 -05005800 MoveMemoryToMemory32(destination.GetStackIndex(), source.GetStackIndex());
5801 }
5802 } else if (source.IsDoubleStackSlot()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00005803 if (destination.IsRegisterPair()) {
5804 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
5805 __ movl(destination.AsRegisterPairHigh<Register>(),
5806 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
5807 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05005808 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
5809 } else {
5810 DCHECK(destination.IsDoubleStackSlot()) << destination;
5811 MoveMemoryToMemory64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005812 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005813 } else if (source.IsConstant()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05005814 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005815 if (constant->IsIntConstant() || constant->IsNullConstant()) {
Mark Mendell09b84632015-02-13 17:48:38 -05005816 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005817 if (destination.IsRegister()) {
Mark Mendell09b84632015-02-13 17:48:38 -05005818 if (value == 0) {
5819 __ xorl(destination.AsRegister<Register>(), destination.AsRegister<Register>());
5820 } else {
5821 __ movl(destination.AsRegister<Register>(), Immediate(value));
5822 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05005823 } else {
5824 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell09b84632015-02-13 17:48:38 -05005825 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005826 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005827 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005828 float fp_value = constant->AsFloatConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005829 int32_t value = bit_cast<int32_t, float>(fp_value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005830 Immediate imm(value);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005831 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005832 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
5833 if (value == 0) {
5834 // Easy handling of 0.0.
5835 __ xorps(dest, dest);
5836 } else {
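// Route the 32-bit constant through a core scratch register, then move it into the XMM register.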
5837 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005838 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
5839 Register temp = static_cast<Register>(ensure_scratch.GetRegister());
5840 __ movl(temp, Immediate(value));
5841 __ movd(dest, temp);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005842 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05005843 } else {
5844 DCHECK(destination.IsStackSlot()) << destination;
5845 __ movl(Address(ESP, destination.GetStackIndex()), imm);
5846 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005847 } else if (constant->IsLongConstant()) {
5848 int64_t value = constant->AsLongConstant()->GetValue();
5849 int32_t low_value = Low32Bits(value);
5850 int32_t high_value = High32Bits(value);
5851 Immediate low(low_value);
5852 Immediate high(high_value);
5853 if (destination.IsDoubleStackSlot()) {
5854 __ movl(Address(ESP, destination.GetStackIndex()), low);
5855 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
5856 } else {
5857 __ movl(destination.AsRegisterPairLow<Register>(), low);
5858 __ movl(destination.AsRegisterPairHigh<Register>(), high);
5859 }
5860 } else {
5861 DCHECK(constant->IsDoubleConstant());
5862 double dbl_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005863 int64_t value = bit_cast<int64_t, double>(dbl_value);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005864 int32_t low_value = Low32Bits(value);
5865 int32_t high_value = High32Bits(value);
5866 Immediate low(low_value);
5867 Immediate high(high_value);
5868 if (destination.IsFpuRegister()) {
5869 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
5870 if (value == 0) {
5871 // Easy handling of 0.0.
5872 __ xorpd(dest, dest);
5873 } else {
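// Materialize the 64-bit constant on the stack, then load it into the XMM register.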
5874 __ pushl(high);
5875 __ pushl(low);
5876 __ movsd(dest, Address(ESP, 0));
5877 __ addl(ESP, Immediate(8));
5878 }
5879 } else {
5880 DCHECK(destination.IsDoubleStackSlot()) << destination;
5881 __ movl(Address(ESP, destination.GetStackIndex()), low);
5882 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
5883 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005884 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005885 } else {
Nicolas Geoffray42d1f5f2015-01-16 09:14:18 +00005886 LOG(FATAL) << "Unimplemented move: " << destination << " <- " << source;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005887 }
5888}
5889
Mark Mendella5c19ce2015-04-01 12:51:05 -04005890void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
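// Swap a core register with a stack slot through a scratch register; if the scratch
// register had to be spilled, the slot offset is adjusted by one word.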
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005891 Register suggested_scratch = reg == EAX ? EBX : EAX;
5892 ScratchRegisterScope ensure_scratch(
5893 this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
5894
5895 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
5896 __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
5897 __ movl(Address(ESP, mem + stack_offset), reg);
5898 __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005899}
5900
Mark Mendell7c8d0092015-01-26 11:21:33 -05005901void ParallelMoveResolverX86::Exchange32(XmmRegister reg, int mem) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005902 ScratchRegisterScope ensure_scratch(
5903 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
5904
5905 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
5906 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
5907 __ movl(temp_reg, Address(ESP, mem + stack_offset));
5908 __ movss(Address(ESP, mem + stack_offset), reg);
5909 __ movd(reg, temp_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005910}
5911
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005912void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
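// Swap two stack slots using two scratch core registers.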
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005913 ScratchRegisterScope ensure_scratch1(
5914 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01005915
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005916 Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
5917 ScratchRegisterScope ensure_scratch2(
5918 this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01005919
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005920 int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
5921 stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
5922 __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
5923 __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
5924 __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
5925 __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005926}
5927
5928void ParallelMoveResolverX86::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005929 MoveOperands* move = moves_[index];
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005930 Location source = move->GetSource();
5931 Location destination = move->GetDestination();
5932
5933 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell90979812015-07-28 16:41:21 -04005934 // Use XOR swap algorithm to avoid serializing XCHG instruction or using a temporary.
5935 DCHECK_NE(destination.AsRegister<Register>(), source.AsRegister<Register>());
5936 __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
5937 __ xorl(source.AsRegister<Register>(), destination.AsRegister<Register>());
5938 __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005939 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005940 Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005941 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005942 Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005943 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
5944 Exchange(destination.GetStackIndex(), source.GetStackIndex());
Mark Mendell7c8d0092015-01-26 11:21:33 -05005945 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
5946 // Use XOR Swap algorithm to avoid a temporary.
5947 DCHECK_NE(source.reg(), destination.reg());
5948 __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
5949 __ xorpd(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5950 __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
5951 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
5952 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
5953 } else if (destination.IsFpuRegister() && source.IsStackSlot()) {
5954 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005955 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
5956 // Take advantage of the 16 bytes in the XMM register.
5957 XmmRegister reg = source.AsFpuRegister<XmmRegister>();
5958 Address stack(ESP, destination.GetStackIndex());
5959 // Load the double into the high doubleword.
5960 __ movhpd(reg, stack);
5961
5962 // Store the low double into the destination.
5963 __ movsd(stack, reg);
5964
5965 // Move the high double to the low double.
5966 __ psrldq(reg, Immediate(8));
5967 } else if (destination.IsFpuRegister() && source.IsDoubleStackSlot()) {
5968 // Take advantage of the 16 bytes in the XMM register.
5969 XmmRegister reg = destination.AsFpuRegister<XmmRegister>();
5970 Address stack(ESP, source.GetStackIndex());
5971 // Load the double into the high doubleword.
5972 __ movhpd(reg, stack);
5973
5974 // Store the low double into the destination.
5975 __ movsd(stack, reg);
5976
5977 // Move the high double to the low double.
5978 __ psrldq(reg, Immediate(8));
5979 } else if (destination.IsDoubleStackSlot() && source.IsDoubleStackSlot()) {
5980 Exchange(destination.GetStackIndex(), source.GetStackIndex());
5981 Exchange(destination.GetHighStackIndex(kX86WordSize), source.GetHighStackIndex(kX86WordSize));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005982 } else {
Mark Mendell7c8d0092015-01-26 11:21:33 -05005983 LOG(FATAL) << "Unimplemented: source: " << source << ", destination: " << destination;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005984 }
5985}
5986
5987void ParallelMoveResolverX86::SpillScratch(int reg) {
5988 __ pushl(static_cast<Register>(reg));
5989}
5990
5991void ParallelMoveResolverX86::RestoreScratch(int reg) {
5992 __ popl(static_cast<Register>(reg));
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01005993}
5994
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005995HLoadClass::LoadKind CodeGeneratorX86::GetSupportedLoadClassKind(
5996 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005997 switch (desired_class_load_kind) {
5998 case HLoadClass::LoadKind::kReferrersClass:
5999 break;
6000 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
6001 DCHECK(!GetCompilerOptions().GetCompilePic());
6002 break;
6003 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
6004 DCHECK(GetCompilerOptions().GetCompilePic());
6005 FALLTHROUGH_INTENDED;
6006 case HLoadClass::LoadKind::kDexCachePcRelative:
6007 DCHECK(!Runtime::Current()->UseJitCompilation()); // Note: boot image is also non-JIT.
6008 // We disable pc-relative load when there is an irreducible loop, as the optimization
6009 // is incompatible with it.
6010 // TODO: Create as many X86ComputeBaseMethodAddress instructions as needed for methods
6011 // with irreducible loops.
6012 if (GetGraph()->HasIrreducibleLoops()) {
6013 return HLoadClass::LoadKind::kDexCacheViaMethod;
6014 }
6015 break;
6016 case HLoadClass::LoadKind::kBootImageAddress:
6017 break;
6018 case HLoadClass::LoadKind::kDexCacheAddress:
6019 DCHECK(Runtime::Current()->UseJitCompilation());
6020 break;
6021 case HLoadClass::LoadKind::kDexCacheViaMethod:
6022 break;
6023 }
6024 return desired_class_load_kind;
6025}
6026
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006027void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006028 if (cls->NeedsAccessCheck()) {
6029 InvokeRuntimeCallingConvention calling_convention;
6030 CodeGenerator::CreateLoadClassLocationSummary(
6031 cls,
6032 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
6033 Location::RegisterLocation(EAX),
6034 /* code_generator_supports_read_barrier */ true);
6035 return;
6036 }
6037
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006038 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
6039 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006040 ? LocationSummary::kCallOnSlowPath
6041 : LocationSummary::kNoCall;
6042 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006043 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006044 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006045 }
6046
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006047 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
6048 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
6049 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod ||
6050 load_kind == HLoadClass::LoadKind::kBootImageLinkTimePcRelative ||
6051 load_kind == HLoadClass::LoadKind::kDexCachePcRelative) {
6052 locations->SetInAt(0, Location::RequiresRegister());
6053 }
6054 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006055}
6056
6057void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) {
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01006058 LocationSummary* locations = cls->GetLocations();
Calin Juravle98893e12015-10-02 21:05:03 +01006059 if (cls->NeedsAccessCheck()) {
6060 codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex());
Serban Constantinescuba45db02016-07-12 22:53:02 +01006061 codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006062 CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
Calin Juravle580b6092015-10-06 17:35:58 +01006063 return;
6064 }
6065
Roland Levillain0d5a2812015-11-13 10:07:31 +00006066 Location out_loc = locations->Out();
6067 Register out = out_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006068
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006069 bool generate_null_check = false;
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006070 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006071 switch (cls->GetLoadKind()) {
6072 case HLoadClass::LoadKind::kReferrersClass: {
6073 DCHECK(!cls->CanCallRuntime());
6074 DCHECK(!cls->MustGenerateClinitCheck());
6075 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
6076 Register current_method = locations->InAt(0).AsRegister<Register>();
6077 GenerateGcRootFieldLoad(
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006078 cls,
6079 out_loc,
6080 Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
Roland Levillain00468f32016-10-27 18:02:48 +01006081 /* fixup_label */ nullptr,
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006082 requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006083 break;
6084 }
6085 case HLoadClass::LoadKind::kBootImageLinkTimeAddress: {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006086 DCHECK(!requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006087 __ movl(out, Immediate(/* placeholder */ 0));
6088 codegen_->RecordTypePatch(cls);
6089 break;
6090 }
6091 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006092 DCHECK(!requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006093 Register method_address = locations->InAt(0).AsRegister<Register>();
6094 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
6095 codegen_->RecordTypePatch(cls);
6096 break;
6097 }
6098 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006099 DCHECK(!requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006100 DCHECK_NE(cls->GetAddress(), 0u);
6101 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
6102 __ movl(out, Immediate(address));
6103 codegen_->RecordSimplePatch();
6104 break;
6105 }
6106 case HLoadClass::LoadKind::kDexCacheAddress: {
6107 DCHECK_NE(cls->GetAddress(), 0u);
6108 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
6109 // /* GcRoot<mirror::Class> */ out = *address
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006110 GenerateGcRootFieldLoad(cls,
6111 out_loc,
6112 Address::Absolute(address),
Roland Levillain00468f32016-10-27 18:02:48 +01006113 /* fixup_label */ nullptr,
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006114 requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006115 generate_null_check = !cls->IsInDexCache();
6116 break;
6117 }
6118 case HLoadClass::LoadKind::kDexCachePcRelative: {
6119 Register base_reg = locations->InAt(0).AsRegister<Register>();
6120 uint32_t offset = cls->GetDexCacheElementOffset();
6121 Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
6122 // /* GcRoot<mirror::Class> */ out = *(base + offset) /* PC-relative */
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006123 GenerateGcRootFieldLoad(cls,
6124 out_loc,
6125 Address(base_reg, CodeGeneratorX86::kDummy32BitOffset),
6126 fixup_label,
6127 requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006128 generate_null_check = !cls->IsInDexCache();
6129 break;
6130 }
6131 case HLoadClass::LoadKind::kDexCacheViaMethod: {
6132 // /* GcRoot<mirror::Class>[] */ out =
6133 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
6134 Register current_method = locations->InAt(0).AsRegister<Register>();
6135 __ movl(out, Address(current_method,
6136 ArtMethod::DexCacheResolvedTypesOffset(kX86PointerSize).Int32Value()));
6137 // /* GcRoot<mirror::Class> */ out = out[type_index]
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006138 GenerateGcRootFieldLoad(cls,
6139 out_loc,
6140 Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())),
Roland Levillain00468f32016-10-27 18:02:48 +01006141 /* fixup_label */ nullptr,
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006142 requires_read_barrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006143 generate_null_check = !cls->IsInDexCache();
6144 break;
6145 }
6146 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006147
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006148 if (generate_null_check || cls->MustGenerateClinitCheck()) {
6149 DCHECK(cls->CanCallRuntime());
6150 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
6151 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
6152 codegen_->AddSlowPath(slow_path);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00006153
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006154 if (generate_null_check) {
6155 __ testl(out, out);
6156 __ j(kEqual, slow_path->GetEntryLabel());
6157 }
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00006158
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006159 if (cls->MustGenerateClinitCheck()) {
6160 GenerateClassInitializationCheck(slow_path, out);
6161 } else {
6162 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006163 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006164 }
6165}
6166
6167void LocationsBuilderX86::VisitClinitCheck(HClinitCheck* check) {
6168 LocationSummary* locations =
6169 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
6170 locations->SetInAt(0, Location::RequiresRegister());
6171 if (check->HasUses()) {
6172 locations->SetOut(Location::SameAsFirstInput());
6173 }
6174}
6175
6176void InstructionCodeGeneratorX86::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006177 // We assume the class to not be null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07006178 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006179 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006180 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00006181 GenerateClassInitializationCheck(slow_path,
6182 check->GetLocations()->InAt(0).AsRegister<Register>());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006183}
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006184
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006185void InstructionCodeGeneratorX86::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07006186 SlowPathCode* slow_path, Register class_reg) {
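// A class whose status is below kStatusInitialized is not yet fully initialized
// and must take the slow path.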
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006187 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
6188 Immediate(mirror::Class::kStatusInitialized));
6189 __ j(kLess, slow_path->GetEntryLabel());
6190 __ Bind(slow_path->GetExitLabel());
6191 // No need for memory fence, thanks to the X86 memory model.
6192}
6193
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006194HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind(
6195 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006196 switch (desired_string_load_kind) {
6197 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
6198 DCHECK(!GetCompilerOptions().GetCompilePic());
6199 break;
6200 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
6201 DCHECK(GetCompilerOptions().GetCompilePic());
6202 FALLTHROUGH_INTENDED;
Vladimir Markoaad75c62016-10-03 08:46:48 +00006203 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01006204 DCHECK(!Runtime::Current()->UseJitCompilation()); // Note: boot image is also non-JIT.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006205 // We disable pc-relative load when there is an irreducible loop, as the optimization
6206 // is incompatible with it.
6207 // TODO: Create as many X86ComputeBaseMethodAddress instructions as needed for methods
6208 // with irreducible loops.
6209 if (GetGraph()->HasIrreducibleLoops()) {
6210 return HLoadString::LoadKind::kDexCacheViaMethod;
6211 }
6212 break;
6213 case HLoadString::LoadKind::kBootImageAddress:
6214 break;
6215 case HLoadString::LoadKind::kDexCacheAddress:
Calin Juravleffc87072016-04-20 14:22:09 +01006216 DCHECK(Runtime::Current()->UseJitCompilation());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006217 break;
6218 case HLoadString::LoadKind::kDexCacheViaMethod:
6219 break;
6220 }
6221 return desired_string_load_kind;
6222}
6223
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006224void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006225 LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier)
Vladimir Markoaad75c62016-10-03 08:46:48 +00006226 ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod)
6227 ? LocationSummary::kCallOnMainOnly
6228 : LocationSummary::kCallOnSlowPath)
Nicolas Geoffray917d0162015-11-24 18:25:35 +00006229 : LocationSummary::kNoCall;
6230 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006231 HLoadString::LoadKind load_kind = load->GetLoadKind();
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006232 if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
Vladimir Markoaad75c62016-10-03 08:46:48 +00006233 load_kind == HLoadString::LoadKind::kBssEntry) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006234 locations->SetInAt(0, Location::RequiresRegister());
6235 }
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006236 if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
6237 locations->SetOut(Location::RegisterLocation(EAX));
6238 } else {
6239 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006240 if (load_kind == HLoadString::LoadKind::kBssEntry) {
6241 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6242 // Rely on the pResolveString and/or marking to save everything.
6243 RegisterSet caller_saves = RegisterSet::Empty();
6244 InvokeRuntimeCallingConvention calling_convention;
6245 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6246 locations->SetCustomSlowPathCallerSaves(caller_saves);
6247 } else {
6248 // For non-Baker read barrier we have a temp-clobbering call.
6249 }
6250 }
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006251 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006252}
6253
6254void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01006255 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006256 Location out_loc = locations->Out();
6257 Register out = out_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006258
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006259 switch (load->GetLoadKind()) {
6260 case HLoadString::LoadKind::kBootImageLinkTimeAddress: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006261 __ movl(out, Immediate(/* placeholder */ 0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00006262 codegen_->RecordBootStringPatch(load);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006263 return; // No dex cache slow path.
6264 }
6265 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006266 Register method_address = locations->InAt(0).AsRegister<Register>();
6267 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Markoaad75c62016-10-03 08:46:48 +00006268 codegen_->RecordBootStringPatch(load);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006269 return; // No dex cache slow path.
6270 }
6271 case HLoadString::LoadKind::kBootImageAddress: {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006272 DCHECK_NE(load->GetAddress(), 0u);
6273 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
6274 __ movl(out, Immediate(address));
6275 codegen_->RecordSimplePatch();
6276 return; // No dex cache slow path.
6277 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00006278 case HLoadString::LoadKind::kBssEntry: {
6279 Register method_address = locations->InAt(0).AsRegister<Register>();
6280 Address address = Address(method_address, CodeGeneratorX86::kDummy32BitOffset);
6281 Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
6282      // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
Roland Levillain00468f32016-10-27 18:02:48 +01006283 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kEmitCompilerReadBarrier);
Vladimir Markoaad75c62016-10-03 08:46:48 +00006284 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load);
6285 codegen_->AddSlowPath(slow_path);
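      // An unresolved string reads back as a null root from the .bss slot; in
      // that case the slow path calls the runtime to resolve the string and
      // fill in the entry.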
6286 __ testl(out, out);
6287 __ j(kEqual, slow_path->GetEntryLabel());
6288 __ Bind(slow_path->GetExitLabel());
6289 return;
6290 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006291 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006292 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006293 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006294
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006295  // TODO: Re-add the compiler code to do the string dex cache lookup.
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006296 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006297 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006298 __ movl(calling_convention.GetRegisterAt(0), Immediate(load->GetStringIndex()));
6299 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6300 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006301}
6302
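// The pending exception lives in a Thread field; on x86 the current Thread is
// addressed through the FS segment, hence the fs()-prefixed accesses below.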
David Brazdilcb1c0552015-08-04 16:22:25 +01006303static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006304 return Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01006305}
6306
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006307void LocationsBuilderX86::VisitLoadException(HLoadException* load) {
6308 LocationSummary* locations =
6309 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
6310 locations->SetOut(Location::RequiresRegister());
6311}
6312
6313void InstructionCodeGeneratorX86::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01006314 __ fs()->movl(load->GetLocations()->Out().AsRegister<Register>(), GetExceptionTlsAddress());
6315}
6316
6317void LocationsBuilderX86::VisitClearException(HClearException* clear) {
6318 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
6319}
6320
6321void InstructionCodeGeneratorX86::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
6322 __ fs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006323}
6324
6325void LocationsBuilderX86::VisitThrow(HThrow* instruction) {
6326 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006327 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006328 InvokeRuntimeCallingConvention calling_convention;
6329 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6330}
6331
6332void InstructionCodeGeneratorX86::VisitThrow(HThrow* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006333 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006334 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006335}
6336
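// Only the slow-path (non-Baker) read barrier configuration needs an extra
// temporary, and only for the type checks that walk super class or component
// type chains: GenerateReferenceLoadOneRegister must save the original
// reference before overwriting it so the read barrier slow path can use it.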
Roland Levillain7c1559a2015-12-15 10:55:36 +00006337static bool TypeCheckNeedsATemporary(TypeCheckKind type_check_kind) {
6338 return kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006339 !kUseBakerReadBarrier &&
6340 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain7c1559a2015-12-15 10:55:36 +00006341 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
6342 type_check_kind == TypeCheckKind::kArrayObjectCheck);
6343}
6344
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006345void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006346 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006347 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01006348 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006349 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006350 case TypeCheckKind::kExactCheck:
6351 case TypeCheckKind::kAbstractClassCheck:
6352 case TypeCheckKind::kClassHierarchyCheck:
6353 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006354 call_kind =
6355 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01006356 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006357 break;
6358 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006359 case TypeCheckKind::kUnresolvedCheck:
6360 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006361 call_kind = LocationSummary::kCallOnSlowPath;
6362 break;
6363 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006364
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006365 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01006366 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006367 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006368 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006369 locations->SetInAt(0, Location::RequiresRegister());
6370 locations->SetInAt(1, Location::Any());
6371 // Note that TypeCheckSlowPathX86 uses this "out" register too.
6372 locations->SetOut(Location::RequiresRegister());
6373 // When read barriers are enabled, we need a temporary register for
6374 // some cases.
Roland Levillain7c1559a2015-12-15 10:55:36 +00006375 if (TypeCheckNeedsATemporary(type_check_kind)) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00006376 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006377 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006378}
6379
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006380void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00006381 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006382 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006383 Location obj_loc = locations->InAt(0);
6384 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006385 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006386 Location out_loc = locations->Out();
6387 Register out = out_loc.AsRegister<Register>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006388 Location maybe_temp_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain7c1559a2015-12-15 10:55:36 +00006389 locations->GetTemp(0) :
6390 Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006391 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006392 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6393 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6394 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07006395 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006396 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006397
6398 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006399 // Avoid null check if we know obj is not null.
6400 if (instruction->MustDoNullCheck()) {
6401 __ testl(obj, obj);
6402 __ j(kEqual, &zero);
6403 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006404
Roland Levillain0d5a2812015-11-13 10:07:31 +00006405 // /* HeapReference<Class> */ out = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00006406 GenerateReferenceLoadTwoRegisters(instruction, out_loc, obj_loc, class_offset);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006407
Roland Levillain7c1559a2015-12-15 10:55:36 +00006408 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006409 case TypeCheckKind::kExactCheck: {
6410 if (cls.IsRegister()) {
6411 __ cmpl(out, cls.AsRegister<Register>());
6412 } else {
6413 DCHECK(cls.IsStackSlot()) << cls;
6414 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6415 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006416
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006417 // Classes must be equal for the instanceof to succeed.
6418 __ j(kNotEqual, &zero);
6419 __ movl(out, Immediate(1));
6420 __ jmp(&done);
6421 break;
6422 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006423
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006424 case TypeCheckKind::kAbstractClassCheck: {
6425 // If the class is abstract, we eagerly fetch the super class of the
6426 // object to avoid doing a comparison we know will fail.
6427 NearLabel loop;
6428 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006429 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006430 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006431 __ testl(out, out);
6432 // If `out` is null, we use it for the result, and jump to `done`.
6433 __ j(kEqual, &done);
6434 if (cls.IsRegister()) {
6435 __ cmpl(out, cls.AsRegister<Register>());
6436 } else {
6437 DCHECK(cls.IsStackSlot()) << cls;
6438 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6439 }
6440 __ j(kNotEqual, &loop);
6441 __ movl(out, Immediate(1));
6442 if (zero.IsLinked()) {
6443 __ jmp(&done);
6444 }
6445 break;
6446 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006447
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006448 case TypeCheckKind::kClassHierarchyCheck: {
6449 // Walk over the class hierarchy to find a match.
6450 NearLabel loop, success;
6451 __ Bind(&loop);
6452 if (cls.IsRegister()) {
6453 __ cmpl(out, cls.AsRegister<Register>());
6454 } else {
6455 DCHECK(cls.IsStackSlot()) << cls;
6456 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6457 }
6458 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006459 // /* HeapReference<Class> */ out = out->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006460 GenerateReferenceLoadOneRegister(instruction, out_loc, super_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006461 __ testl(out, out);
6462 __ j(kNotEqual, &loop);
6463 // If `out` is null, we use it for the result, and jump to `done`.
6464 __ jmp(&done);
6465 __ Bind(&success);
6466 __ movl(out, Immediate(1));
6467 if (zero.IsLinked()) {
6468 __ jmp(&done);
6469 }
6470 break;
6471 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006472
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006473 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006474 // Do an exact check.
6475 NearLabel exact_check;
6476 if (cls.IsRegister()) {
6477 __ cmpl(out, cls.AsRegister<Register>());
6478 } else {
6479 DCHECK(cls.IsStackSlot()) << cls;
6480 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6481 }
6482 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006483 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006484 // /* HeapReference<Class> */ out = out->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006485 GenerateReferenceLoadOneRegister(instruction, out_loc, component_offset, maybe_temp_loc);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006486 __ testl(out, out);
6487 // If `out` is null, we use it for the result, and jump to `done`.
6488 __ j(kEqual, &done);
6489 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
6490 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006491 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006492 __ movl(out, Immediate(1));
6493 __ jmp(&done);
6494 break;
6495 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006496
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006497 case TypeCheckKind::kArrayCheck: {
6498 if (cls.IsRegister()) {
6499 __ cmpl(out, cls.AsRegister<Register>());
6500 } else {
6501 DCHECK(cls.IsStackSlot()) << cls;
6502 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6503 }
6504 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00006505 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(instruction,
6506 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006507 codegen_->AddSlowPath(slow_path);
6508 __ j(kNotEqual, slow_path->GetEntryLabel());
6509 __ movl(out, Immediate(1));
6510 if (zero.IsLinked()) {
6511 __ jmp(&done);
6512 }
6513 break;
6514 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006515
Calin Juravle98893e12015-10-02 21:05:03 +01006516 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006517 case TypeCheckKind::kInterfaceCheck: {
6518 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006519 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00006520 // cases.
6521 //
6522 // We cannot directly call the InstanceofNonTrivial runtime
6523 // entry point without resorting to a type checking slow path
6524 // here (i.e. by calling InvokeRuntime directly), as it would
6525      // require assigning fixed registers for the inputs of this
6526 // HInstanceOf instruction (following the runtime calling
6527 // convention), which might be cluttered by the potential first
6528 // read barrier emission at the beginning of this method.
Roland Levillain7c1559a2015-12-15 10:55:36 +00006529 //
6530 // TODO: Introduce a new runtime entry point taking the object
6531 // to test (instead of its class) as argument, and let it deal
6532 // with the read barrier issues. This will let us refactor this
6533 // case of the `switch` code as it was previously (with a direct
6534 // call to the runtime not using a type checking slow path).
6535 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006536 DCHECK(locations->OnlyCallsOnSlowPath());
6537 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(instruction,
6538 /* is_fatal */ false);
6539 codegen_->AddSlowPath(slow_path);
6540 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006541 if (zero.IsLinked()) {
6542 __ jmp(&done);
6543 }
6544 break;
6545 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006546 }
6547
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006548 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006549 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006550 __ xorl(out, out);
6551 }
6552
6553 if (done.IsLinked()) {
6554 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006555 }
6556
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006557 if (slow_path != nullptr) {
6558 __ Bind(slow_path->GetExitLabel());
6559 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006560}
6561
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006562void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006563 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
6564 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006565 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
6566 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006567 case TypeCheckKind::kExactCheck:
6568 case TypeCheckKind::kAbstractClassCheck:
6569 case TypeCheckKind::kClassHierarchyCheck:
6570 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006571 call_kind = (throws_into_catch || kEmitCompilerReadBarrier) ?
6572 LocationSummary::kCallOnSlowPath :
6573 LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006574 break;
6575 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006576 case TypeCheckKind::kUnresolvedCheck:
6577 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006578 call_kind = LocationSummary::kCallOnSlowPath;
6579 break;
6580 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006581 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
6582 locations->SetInAt(0, Location::RequiresRegister());
6583 locations->SetInAt(1, Location::Any());
6584 // Note that TypeCheckSlowPathX86 uses this "temp" register too.
6585 locations->AddTemp(Location::RequiresRegister());
6586 // When read barriers are enabled, we need an additional temporary
6587 // register for some cases.
Roland Levillain7c1559a2015-12-15 10:55:36 +00006588 if (TypeCheckNeedsATemporary(type_check_kind)) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006589 locations->AddTemp(Location::RequiresRegister());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006590 }
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006591}
6592
6593void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00006594 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006595 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006596 Location obj_loc = locations->InAt(0);
6597 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006598 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006599 Location temp_loc = locations->GetTemp(0);
6600 Register temp = temp_loc.AsRegister<Register>();
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006601 Location maybe_temp2_loc = TypeCheckNeedsATemporary(type_check_kind) ?
Roland Levillain7c1559a2015-12-15 10:55:36 +00006602 locations->GetTemp(1) :
6603 Location::NoLocation();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006604 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6605 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6606 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6607 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006608
Roland Levillain0d5a2812015-11-13 10:07:31 +00006609 bool is_type_check_slow_path_fatal =
6610 (type_check_kind == TypeCheckKind::kExactCheck ||
6611 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
6612 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
6613 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
6614 !instruction->CanThrowIntoCatchBlock();
6615 SlowPathCode* type_check_slow_path =
6616 new (GetGraph()->GetArena()) TypeCheckSlowPathX86(instruction,
6617 is_type_check_slow_path_fatal);
6618 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006619
Roland Levillain0d5a2812015-11-13 10:07:31 +00006620 NearLabel done;
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006621 // Avoid null check if we know obj is not null.
6622 if (instruction->MustDoNullCheck()) {
6623 __ testl(obj, obj);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006624 __ j(kEqual, &done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006625 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006626
Roland Levillain0d5a2812015-11-13 10:07:31 +00006627 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00006628 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006629
Roland Levillain0d5a2812015-11-13 10:07:31 +00006630 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006631 case TypeCheckKind::kExactCheck:
6632 case TypeCheckKind::kArrayCheck: {
6633 if (cls.IsRegister()) {
6634 __ cmpl(temp, cls.AsRegister<Register>());
6635 } else {
6636 DCHECK(cls.IsStackSlot()) << cls;
6637 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
6638 }
6639 // Jump to slow path for throwing the exception or doing a
6640 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006641 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006642 break;
6643 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006644
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006645 case TypeCheckKind::kAbstractClassCheck: {
6646 // If the class is abstract, we eagerly fetch the super class of the
6647 // object to avoid doing a comparison we know will fail.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006648 NearLabel loop, compare_classes;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006649 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006650 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006651 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006652
6653 // If the class reference currently in `temp` is not null, jump
6654 // to the `compare_classes` label to compare it with the checked
6655 // class.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006656 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006657 __ j(kNotEqual, &compare_classes);
6658 // Otherwise, jump to the slow path to throw the exception.
6659 //
6660 // But before, move back the object's class into `temp` before
6661 // going into the slow path, as it has been overwritten in the
6662 // meantime.
6663 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00006664 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006665 __ jmp(type_check_slow_path->GetEntryLabel());
6666
6667 __ Bind(&compare_classes);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006668 if (cls.IsRegister()) {
6669 __ cmpl(temp, cls.AsRegister<Register>());
6670 } else {
6671 DCHECK(cls.IsStackSlot()) << cls;
6672 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
6673 }
6674 __ j(kNotEqual, &loop);
6675 break;
6676 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006677
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006678 case TypeCheckKind::kClassHierarchyCheck: {
6679 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006680 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006681 __ Bind(&loop);
6682 if (cls.IsRegister()) {
6683 __ cmpl(temp, cls.AsRegister<Register>());
6684 } else {
6685 DCHECK(cls.IsStackSlot()) << cls;
6686 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
6687 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006688 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006689
Roland Levillain0d5a2812015-11-13 10:07:31 +00006690 // /* HeapReference<Class> */ temp = temp->super_class_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006691 GenerateReferenceLoadOneRegister(instruction, temp_loc, super_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006692
6693 // If the class reference currently in `temp` is not null, jump
6694      // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006695 __ testl(temp, temp);
6696 __ j(kNotEqual, &loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006697 // Otherwise, jump to the slow path to throw the exception.
6698 //
6699 // But before, move back the object's class into `temp` before
6700 // going into the slow path, as it has been overwritten in the
6701 // meantime.
6702 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00006703 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006704 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006705 break;
6706 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006707
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006708 case TypeCheckKind::kArrayObjectCheck: {
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006709 // Do an exact check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006710 NearLabel check_non_primitive_component_type;
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006711 if (cls.IsRegister()) {
6712 __ cmpl(temp, cls.AsRegister<Register>());
6713 } else {
6714 DCHECK(cls.IsStackSlot()) << cls;
6715 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
6716 }
6717 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006718
6719 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006720 // /* HeapReference<Class> */ temp = temp->component_type_
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006721 GenerateReferenceLoadOneRegister(instruction, temp_loc, component_offset, maybe_temp2_loc);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006722
6723 // If the component type is not null (i.e. the object is indeed
6724 // an array), jump to label `check_non_primitive_component_type`
6725 // to further check that this component type is not a primitive
6726 // type.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006727 __ testl(temp, temp);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006728 __ j(kNotEqual, &check_non_primitive_component_type);
6729 // Otherwise, jump to the slow path to throw the exception.
6730 //
6731 // But before, move back the object's class into `temp` before
6732 // going into the slow path, as it has been overwritten in the
6733 // meantime.
6734 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00006735 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006736 __ jmp(type_check_slow_path->GetEntryLabel());
6737
6738 __ Bind(&check_non_primitive_component_type);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006739 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0d5a2812015-11-13 10:07:31 +00006740 __ j(kEqual, &done);
6741 // Same comment as above regarding `temp` and the slow path.
6742 // /* HeapReference<Class> */ temp = obj->klass_
Vladimir Marko953437b2016-08-24 08:30:46 +00006743 GenerateReferenceLoadTwoRegisters(instruction, temp_loc, obj_loc, class_offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006744 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006745 break;
6746 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006747
Calin Juravle98893e12015-10-02 21:05:03 +01006748 case TypeCheckKind::kUnresolvedCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006749 case TypeCheckKind::kInterfaceCheck:
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006750 // We always go into the type check slow path for the unresolved
6751 // and interface check cases.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006752 //
6753 // We cannot directly call the CheckCast runtime entry point
6754 // without resorting to a type checking slow path here (i.e. by
6755 // calling InvokeRuntime directly), as it would require to
6756 // assign fixed registers for the inputs of this HInstanceOf
6757 // instruction (following the runtime calling convention), which
6758 // might be cluttered by the potential first read barrier
6759 // emission at the beginning of this method.
Roland Levillain7c1559a2015-12-15 10:55:36 +00006760 //
6761 // TODO: Introduce a new runtime entry point taking the object
6762 // to test (instead of its class) as argument, and let it deal
6763 // with the read barrier issues. This will let us refactor this
6764 // case of the `switch` code as it was previously (with a direct
6765 // call to the runtime not using a type checking slow path).
6766 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006767 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006768 break;
6769 }
6770 __ Bind(&done);
6771
Roland Levillain0d5a2812015-11-13 10:07:31 +00006772 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006773}
6774
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006775void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction) {
6776 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006777 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006778 InvokeRuntimeCallingConvention calling_convention;
6779 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6780}
6781
6782void InstructionCodeGeneratorX86::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006783 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject
6784 : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006785 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006786 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006787 if (instruction->IsEnter()) {
6788 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6789 } else {
6790 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6791 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006792}
6793
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006794void LocationsBuilderX86::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6795void LocationsBuilderX86::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6796void LocationsBuilderX86::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6797
6798void LocationsBuilderX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
6799 LocationSummary* locations =
6800 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6801 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6802 || instruction->GetResultType() == Primitive::kPrimLong);
6803 locations->SetInAt(0, Location::RequiresRegister());
6804 locations->SetInAt(1, Location::Any());
6805 locations->SetOut(Location::SameAsFirstInput());
6806}
6807
6808void InstructionCodeGeneratorX86::VisitAnd(HAnd* instruction) {
6809 HandleBitwiseOperation(instruction);
6810}
6811
6812void InstructionCodeGeneratorX86::VisitOr(HOr* instruction) {
6813 HandleBitwiseOperation(instruction);
6814}
6815
6816void InstructionCodeGeneratorX86::VisitXor(HXor* instruction) {
6817 HandleBitwiseOperation(instruction);
6818}
6819
6820void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
6821 LocationSummary* locations = instruction->GetLocations();
6822 Location first = locations->InAt(0);
6823 Location second = locations->InAt(1);
6824 DCHECK(first.Equals(locations->Out()));
6825
6826 if (instruction->GetResultType() == Primitive::kPrimInt) {
6827 if (second.IsRegister()) {
6828 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006829 __ andl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006830 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006831 __ orl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006832 } else {
6833 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006834 __ xorl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006835 }
6836 } else if (second.IsConstant()) {
6837 if (instruction->IsAnd()) {
Roland Levillain199f3362014-11-27 17:15:16 +00006838 __ andl(first.AsRegister<Register>(),
6839 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006840 } else if (instruction->IsOr()) {
Roland Levillain199f3362014-11-27 17:15:16 +00006841 __ orl(first.AsRegister<Register>(),
6842 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006843 } else {
6844 DCHECK(instruction->IsXor());
Roland Levillain199f3362014-11-27 17:15:16 +00006845 __ xorl(first.AsRegister<Register>(),
6846 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006847 }
6848 } else {
6849 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006850 __ andl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006851 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006852 __ orl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006853 } else {
6854 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006855 __ xorl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006856 }
6857 }
6858 } else {
6859 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
6860 if (second.IsRegisterPair()) {
6861 if (instruction->IsAnd()) {
6862 __ andl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
6863 __ andl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
6864 } else if (instruction->IsOr()) {
6865 __ orl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
6866 __ orl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
6867 } else {
6868 DCHECK(instruction->IsXor());
6869 __ xorl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
6870 __ xorl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
6871 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006872 } else if (second.IsDoubleStackSlot()) {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006873 if (instruction->IsAnd()) {
6874 __ andl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
6875 __ andl(first.AsRegisterPairHigh<Register>(),
6876 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
6877 } else if (instruction->IsOr()) {
6878 __ orl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
6879 __ orl(first.AsRegisterPairHigh<Register>(),
6880 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
6881 } else {
6882 DCHECK(instruction->IsXor());
6883 __ xorl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
6884 __ xorl(first.AsRegisterPairHigh<Register>(),
6885 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
6886 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006887 } else {
6888 DCHECK(second.IsConstant()) << second;
6889 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006890 int32_t low_value = Low32Bits(value);
6891 int32_t high_value = High32Bits(value);
6892 Immediate low(low_value);
6893 Immediate high(high_value);
6894 Register first_low = first.AsRegisterPairLow<Register>();
6895 Register first_high = first.AsRegisterPairHigh<Register>();
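      // Fold the constant one 32-bit half at a time: AND with 0 becomes a
      // zeroing xorl, AND with -1 is a no-op and is skipped, and OR/XOR with 0
      // are likewise skipped.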
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006896 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006897 if (low_value == 0) {
6898 __ xorl(first_low, first_low);
6899 } else if (low_value != -1) {
6900 __ andl(first_low, low);
6901 }
6902 if (high_value == 0) {
6903 __ xorl(first_high, first_high);
6904 } else if (high_value != -1) {
6905 __ andl(first_high, high);
6906 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006907 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006908 if (low_value != 0) {
6909 __ orl(first_low, low);
6910 }
6911 if (high_value != 0) {
6912 __ orl(first_high, high);
6913 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006914 } else {
6915 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006916 if (low_value != 0) {
6917 __ xorl(first_low, low);
6918 }
6919 if (high_value != 0) {
6920 __ xorl(first_high, high);
6921 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006922 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006923 }
6924 }
6925}
6926
Roland Levillain7c1559a2015-12-15 10:55:36 +00006927void InstructionCodeGeneratorX86::GenerateReferenceLoadOneRegister(HInstruction* instruction,
6928 Location out,
6929 uint32_t offset,
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006930 Location maybe_temp) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00006931 Register out_reg = out.AsRegister<Register>();
6932 if (kEmitCompilerReadBarrier) {
6933 if (kUseBakerReadBarrier) {
6934 // Load with fast path based Baker's read barrier.
6935 // /* HeapReference<Object> */ out = *(out + offset)
6936 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00006937 instruction, out, out_reg, offset, /* needs_null_check */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00006938 } else {
6939 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006940 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain7c1559a2015-12-15 10:55:36 +00006941 // in the following move operation, as we will need it for the
6942 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00006943 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006944 __ movl(maybe_temp.AsRegister<Register>(), out_reg);
Roland Levillain7c1559a2015-12-15 10:55:36 +00006945 // /* HeapReference<Object> */ out = *(out + offset)
6946 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00006947 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain7c1559a2015-12-15 10:55:36 +00006948 }
6949 } else {
6950 // Plain load with no read barrier.
6951 // /* HeapReference<Object> */ out = *(out + offset)
6952 __ movl(out_reg, Address(out_reg, offset));
6953 __ MaybeUnpoisonHeapReference(out_reg);
6954 }
6955}
6956
6957void InstructionCodeGeneratorX86::GenerateReferenceLoadTwoRegisters(HInstruction* instruction,
6958 Location out,
6959 Location obj,
Vladimir Marko953437b2016-08-24 08:30:46 +00006960 uint32_t offset) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00006961 Register out_reg = out.AsRegister<Register>();
6962 Register obj_reg = obj.AsRegister<Register>();
6963 if (kEmitCompilerReadBarrier) {
6964 if (kUseBakerReadBarrier) {
6965 // Load with fast path based Baker's read barrier.
6966 // /* HeapReference<Object> */ out = *(obj + offset)
6967 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00006968 instruction, out, obj_reg, offset, /* needs_null_check */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00006969 } else {
6970 // Load with slow path based read barrier.
6971 // /* HeapReference<Object> */ out = *(obj + offset)
6972 __ movl(out_reg, Address(obj_reg, offset));
6973 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6974 }
6975 } else {
6976 // Plain load with no read barrier.
6977 // /* HeapReference<Object> */ out = *(obj + offset)
6978 __ movl(out_reg, Address(obj_reg, offset));
6979 __ MaybeUnpoisonHeapReference(out_reg);
6980 }
6981}
6982
6983void InstructionCodeGeneratorX86::GenerateGcRootFieldLoad(HInstruction* instruction,
6984 Location root,
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006985 const Address& address,
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006986 Label* fixup_label,
6987 bool requires_read_barrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00006988 Register root_reg = root.AsRegister<Register>();
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006989 if (requires_read_barrier) {
6990 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00006991 if (kUseBakerReadBarrier) {
6992 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6993      // Baker's read barriers are used:
6994 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006995 // root = *address;
Roland Levillain7c1559a2015-12-15 10:55:36 +00006996 // if (Thread::Current()->GetIsGcMarking()) {
6997 // root = ReadBarrier::Mark(root)
6998 // }
6999
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007000 // /* GcRoot<mirror::Object> */ root = *address
7001 __ movl(root_reg, address);
7002 if (fixup_label != nullptr) {
7003 __ Bind(fixup_label);
7004 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007005 static_assert(
7006 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
7007 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
7008 "have different sizes.");
7009 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
7010 "art::mirror::CompressedReference<mirror::Object> and int32_t "
7011 "have different sizes.");
7012
Vladimir Marko953437b2016-08-24 08:30:46 +00007013 // Slow path marking the GC root `root`.
7014 SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86(
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007015 instruction, root, /* unpoison_ref_before_marking */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007016 codegen_->AddSlowPath(slow_path);
7017
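      // Branch to the marking slow path only while the GC is concurrently
      // marking (the thread-local flag tested below is non-zero); otherwise
      // the root loaded above is used as is.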
Andreas Gampe542451c2016-07-26 09:02:02 -07007018 __ fs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86PointerSize>().Int32Value()),
Roland Levillain7c1559a2015-12-15 10:55:36 +00007019 Immediate(0));
7020 __ j(kNotEqual, slow_path->GetEntryLabel());
7021 __ Bind(slow_path->GetExitLabel());
7022 } else {
7023 // GC root loaded through a slow path for read barriers other
7024 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007025 // /* GcRoot<mirror::Object>* */ root = address
7026 __ leal(root_reg, address);
7027 if (fixup_label != nullptr) {
7028 __ Bind(fixup_label);
7029 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007030 // /* mirror::Object* */ root = root->Read()
7031 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
7032 }
7033 } else {
7034 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007035 // /* GcRoot<mirror::Object> */ root = *address
7036 __ movl(root_reg, address);
7037 if (fixup_label != nullptr) {
7038 __ Bind(fixup_label);
7039 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007040 // Note that GC roots are not affected by heap poisoning, thus we
7041 // do not have to unpoison `root_reg` here.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007042 }
7043}
7044
7045void CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7046 Location ref,
7047 Register obj,
7048 uint32_t offset,
Roland Levillain7c1559a2015-12-15 10:55:36 +00007049 bool needs_null_check) {
7050 DCHECK(kEmitCompilerReadBarrier);
7051 DCHECK(kUseBakerReadBarrier);
7052
7053 // /* HeapReference<Object> */ ref = *(obj + offset)
7054 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007055 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007056}
7057
7058void CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7059 Location ref,
7060 Register obj,
7061 uint32_t data_offset,
7062 Location index,
Roland Levillain7c1559a2015-12-15 10:55:36 +00007063 bool needs_null_check) {
7064 DCHECK(kEmitCompilerReadBarrier);
7065 DCHECK(kUseBakerReadBarrier);
7066
Roland Levillain3d312422016-06-23 13:53:42 +01007067 static_assert(
7068 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7069 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00007070 // /* HeapReference<Object> */ ref =
7071 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007072 Address src = CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007073 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007074}
7075
7076void CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
7077 Location ref,
7078 Register obj,
7079 const Address& src,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007080 bool needs_null_check,
7081 bool always_update_field,
7082 Register* temp) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007083 DCHECK(kEmitCompilerReadBarrier);
7084 DCHECK(kUseBakerReadBarrier);
7085
7086 // In slow path based read barriers, the read barrier call is
7087 // inserted after the original load. However, in fast path based
7088 // Baker's read barriers, we need to perform the load of
7089 // mirror::Object::monitor_ *before* the original reference load.
7090 // This load-load ordering is required by the read barrier.
7091 // The fast path/slow path (for Baker's algorithm) should look like:
7092 //
7093 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
7094 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
7095 // HeapReference<Object> ref = *src; // Original reference load.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007096 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain7c1559a2015-12-15 10:55:36 +00007097 // if (is_gray) {
7098 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
7099 // }
7100 //
7101 // Note: the original implementation in ReadBarrier::Barrier is
7102 // slightly more complex as:
7103 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007104 // the high-bits of rb_state, which are expected to be all zeroes
7105 // (we use CodeGeneratorX86::GenerateMemoryBarrier instead here,
7106 // which is a no-op thanks to the x86 memory model);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007107 // - it performs additional checks that we do not do here for
7108 // performance reasons.
7109
7110 Register ref_reg = ref.AsRegister<Register>();
Roland Levillain7c1559a2015-12-15 10:55:36 +00007111 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
7112
Vladimir Marko953437b2016-08-24 08:30:46 +00007113 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007114 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
7115 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko953437b2016-08-24 08:30:46 +00007116 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
7117 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
7118 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
7119
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007120 // if (rb_state == ReadBarrier::GrayState())
Vladimir Marko953437b2016-08-24 08:30:46 +00007121 // ref = ReadBarrier::Mark(ref);
7122 // At this point, just do the "if" and make sure that flags are preserved until the branch.
7123 __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain7c1559a2015-12-15 10:55:36 +00007124 if (needs_null_check) {
7125 MaybeRecordImplicitNullCheck(instruction);
7126 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007127
7128 // Load fence to prevent load-load reordering.
7129 // Note that this is a no-op, thanks to the x86 memory model.
7130 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
7131
7132 // The actual reference load.
7133 // /* HeapReference<Object> */ ref = *src
Vladimir Marko953437b2016-08-24 08:30:46 +00007134 __ movl(ref_reg, src); // Flags are unaffected.
7135
7136 // Note: Reference unpoisoning modifies the flags, so we need to delay it after the branch.
7137 // Slow path marking the object `ref` when it is gray.
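  // `always_update_field` is requested by callers that must also write the
  // possibly forwarded reference back to the holding field (for instance
  // compare-and-swap style updates); that variant needs the extra `temp`.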
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007138 SlowPathCode* slow_path;
7139 if (always_update_field) {
7140 DCHECK(temp != nullptr);
7141 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathX86(
7142 instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp);
7143 } else {
7144 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86(
7145 instruction, ref, /* unpoison_ref_before_marking */ true);
7146 }
  AddSlowPath(slow_path);

  // We have done the "if" of the gray bit check above, now branch based on the flags.
  __ j(kNotZero, slow_path->GetEntryLabel());

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorX86::GenerateReadBarrierSlow(HInstruction* instruction,
                                               Location out,
                                               Location ref,
                                               Location obj,
                                               uint32_t offset,
                                               Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCode* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathX86(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorX86::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                    Location out,
                                                    Location ref,
                                                    Location obj,
                                                    uint32_t offset,
                                                    Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    __ UnpoisonHeapReference(out.AsRegister<Register>());
  }
}

void CodeGeneratorX86::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                      Location out,
                                                      Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86(instruction, out, root);
  AddSlowPath(slow_path);

  __ jmp(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// Simple implementation of packed switch - generate cascaded compare/jumps.
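// For example, a switch over the cases {1, 2, 3} compiles to roughly:
//   cmp value, 1 ; jl default ; je case_1
//   cmp value, 3 ; jl case_2  ; je case_3
//   jmp default    (omitted when the default block follows immediately)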
void LocationsBuilderX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorX86::GenPackedSwitchWithCompares(Register value_reg,
                                                              int32_t lower_bound,
                                                              uint32_t num_entries,
                                                              HBasicBlock* switch_block,
                                                              HBasicBlock* default_block) {
  // Figure out the correct compare values and jump conditions.
  // Handle the first compare/branch as a special case because it might
  // jump to the default case.
  DCHECK_GT(num_entries, 2u);
  Condition first_condition;
  uint32_t index;
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  if (lower_bound != 0) {
    first_condition = kLess;
    __ cmpl(value_reg, Immediate(lower_bound));
    __ j(first_condition, codegen_->GetLabelOf(default_block));
    __ j(kEqual, codegen_->GetLabelOf(successors[0]));

    index = 1;
  } else {
    // Handle all the compare/jumps below.
    first_condition = kBelow;
    index = 0;
  }

  // Handle the rest of the compare/jumps.
  for (; index + 1 < num_entries; index += 2) {
    int32_t compare_to_value = lower_bound + index + 1;
    __ cmpl(value_reg, Immediate(compare_to_value));
    // Jump to successors[index] if value < case_value[index + 1]; together with
    // the preceding checks this means value == case_value[index].
    __ j(first_condition, codegen_->GetLabelOf(successors[index]));
    // Jump to successors[index + 1] if value == case_value[index + 1].
    __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
  }

  if (index != num_entries) {
    // There are an odd number of entries. Handle the last one.
    DCHECK_EQ(index + 1, num_entries);
    __ cmpl(value_reg, Immediate(lower_bound + index));
    __ j(kEqual, codegen_->GetLabelOf(successors[index]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ jmp(codegen_->GetLabelOf(default_block));
  }
}

void InstructionCodeGeneratorX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  Register value_reg = locations->InAt(0).AsRegister<Register>();

  GenPackedSwitchWithCompares(value_reg,
                              lower_bound,
                              num_entries,
                              switch_instr->GetBlock(),
                              switch_instr->GetDefaultBlock());
}

void LocationsBuilderX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());

  // Constant area pointer.
  locations->SetInAt(1, Location::RequiresRegister());

  // And the temporary we need.
  locations->AddTemp(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  Register value_reg = locations->InAt(0).AsRegister<Register>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  if (num_entries <= kPackedSwitchJumpTableThreshold) {
    GenPackedSwitchWithCompares(value_reg,
                                lower_bound,
                                num_entries,
                                switch_instr->GetBlock(),
                                default_block);
    return;
  }

  // Use a jump table, which is emitted into the constant area.
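  // The emitted sequence is roughly:
  //   leal temp, [value - lower_bound]              ; only when lower_bound != 0
  //   cmpl value', num_entries - 1                  ; value' is the de-biased value
  //   ja   default
  //   movl temp, [constant_area + table + value' * 4]
  //   addl temp, constant_area
  //   jmp  temp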
  Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
  Register constant_area = locations->InAt(1).AsRegister<Register>();

  // Remove the bias, if needed.
  if (lower_bound != 0) {
    __ leal(temp_reg, Address(value_reg, -lower_bound));
    value_reg = temp_reg;
  }

  // Is the value in range?
  DCHECK_GE(num_entries, 1u);
  __ cmpl(value_reg, Immediate(num_entries - 1));
  __ j(kAbove, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load (target-constant_area) from the jump table, indexing by the value.
  __ movl(temp_reg, codegen_->LiteralCaseTable(switch_instr, constant_area, value_reg));

  // Compute the actual target address by adding in constant_area.
  __ addl(temp_reg, constant_area);

  // And jump.
  __ jmp(temp_reg);
}

void LocationsBuilderX86::VisitX86ComputeBaseMethodAddress(
    HX86ComputeBaseMethodAddress* insn) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86::VisitX86ComputeBaseMethodAddress(
    HX86ComputeBaseMethodAddress* insn) {
  LocationSummary* locations = insn->GetLocations();
  Register reg = locations->Out().AsRegister<Register>();

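  // 32-bit x86 has no PC-relative addressing mode, so the base address is
  // materialized explicitly: calling the next instruction pushes that
  // instruction's address, which is then popped into `reg`. Constant area and
  // jump table accesses are later fixed up as displacements from this address.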
  // Generate call to next instruction.
  Label next_instruction;
  __ call(&next_instruction);
  __ Bind(&next_instruction);

  // Remember this offset for later use with constant area.
  codegen_->SetMethodAddressOffset(GetAssembler()->CodeSize());

  // Grab the return address off the stack.
  __ popl(reg);
}

void LocationsBuilderX86::VisitX86LoadFromConstantTable(
    HX86LoadFromConstantTable* insn) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::ConstantLocation(insn->GetConstant()));

  // If the value is emitted at its use site, we only need the inputs to be set.
  if (insn->IsEmittedAtUseSite()) {
    return;
  }

  switch (insn->GetType()) {
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetOut(Location::RequiresFpuRegister());
      break;

    case Primitive::kPrimInt:
      locations->SetOut(Location::RequiresRegister());
      break;

    default:
      LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
  }
}

void InstructionCodeGeneratorX86::VisitX86LoadFromConstantTable(HX86LoadFromConstantTable* insn) {
  if (insn->IsEmittedAtUseSite()) {
    return;
  }

  LocationSummary* locations = insn->GetLocations();
  Location out = locations->Out();
  Register const_area = locations->InAt(0).AsRegister<Register>();
  HConstant* value = insn->GetConstant();

  switch (insn->GetType()) {
    case Primitive::kPrimFloat:
      __ movss(out.AsFpuRegister<XmmRegister>(),
               codegen_->LiteralFloatAddress(value->AsFloatConstant()->GetValue(), const_area));
      break;

    case Primitive::kPrimDouble:
      __ movsd(out.AsFpuRegister<XmmRegister>(),
               codegen_->LiteralDoubleAddress(value->AsDoubleConstant()->GetValue(), const_area));
      break;

    case Primitive::kPrimInt:
      __ movl(out.AsRegister<Register>(),
              codegen_->LiteralInt32Address(value->AsIntConstant()->GetValue(), const_area));
      break;

    default:
      LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
  }
}

/**
 * Class to handle late fixup of offsets into constant area.
 */
class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
 public:
  RIPFixup(CodeGeneratorX86& codegen, size_t offset)
      : codegen_(&codegen), offset_into_constant_area_(offset) {}

 protected:
  void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }

  CodeGeneratorX86* codegen_;

 private:
  void Process(const MemoryRegion& region, int pos) OVERRIDE {
    // Patch the correct offset for the instruction. The place to patch is the
    // last 4 bytes of the instruction.
    // The value to patch is the distance of the offset in the constant area
    // from the address computed by the HX86ComputeBaseMethodAddress instruction.
    int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
    int32_t relative_position = constant_offset - codegen_->GetMethodAddressOffset();
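    // Illustrative example (hypothetical offsets): if the method address was
    // recorded at code offset 16, the constant area starts at offset 4096 and
    // this entry is at offset 8 within it, the displacement patched below is
    // 4096 + 8 - 16 = 4088.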

    // Patch in the right value.
    region.StoreUnaligned<int32_t>(pos - 4, relative_position);
  }

  // Location in constant area that the fixup refers to.
  int32_t offset_into_constant_area_;
};

/**
 * Class to handle late fixup of offsets to a jump table that will be created in the
 * constant area.
 */
class JumpTableRIPFixup : public RIPFixup {
 public:
  JumpTableRIPFixup(CodeGeneratorX86& codegen, HX86PackedSwitch* switch_instr)
      : RIPFixup(codegen, static_cast<size_t>(-1)), switch_instr_(switch_instr) {}

  void CreateJumpTable() {
    X86Assembler* assembler = codegen_->GetAssembler();

    // Ensure that the reference to the jump table has the correct offset.
    const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
    SetOffset(offset_in_constant_table);

    // The label values in the jump table are computed relative to the
    // instruction addressing the constant area.
    const int32_t relative_offset = codegen_->GetMethodAddressOffset();

    // Populate the jump table with the correct values.
    int32_t num_entries = switch_instr_->GetNumEntries();
    HBasicBlock* block = switch_instr_->GetBlock();
    const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
    // Each entry is the offset of the target block relative to the address
    // computed by the HX86ComputeBaseMethodAddress instruction.
    for (int32_t i = 0; i < num_entries; i++) {
      HBasicBlock* b = successors[i];
      Label* l = codegen_->GetLabelOf(b);
      DCHECK(l->IsBound());
      int32_t offset_to_block = l->Position() - relative_offset;
      assembler->AppendInt32(offset_to_block);
    }
  }

 private:
  const HX86PackedSwitch* switch_instr_;
};

void CodeGeneratorX86::Finalize(CodeAllocator* allocator) {
  // Generate the constant area if needed.
  X86Assembler* assembler = GetAssembler();
  if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
    // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8
    // byte values.
    assembler->Align(4, 0);
    constant_area_start_ = assembler->CodeSize();

    // Populate any jump tables.
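    // Creating a jump table records its offset in the corresponding fixup and
    // appends the 32-bit per-case entries to the assembler's constant area, so
    // this must happen before AddConstantArea() emits the area below.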
    for (auto jump_table : fixups_to_jump_tables_) {
      jump_table->CreateJumpTable();
    }

    // And now add the constant area to the generated code.
    assembler->AddConstantArea();
  }

  // And finish up.
  CodeGenerator::Finalize(allocator);
}

Address CodeGeneratorX86::LiteralDoubleAddress(double v, Register reg) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
  return Address(reg, kDummy32BitOffset, fixup);
}

Address CodeGeneratorX86::LiteralFloatAddress(float v, Register reg) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
  return Address(reg, kDummy32BitOffset, fixup);
}

Address CodeGeneratorX86::LiteralInt32Address(int32_t v, Register reg) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
  return Address(reg, kDummy32BitOffset, fixup);
}

Address CodeGeneratorX86::LiteralInt64Address(int64_t v, Register reg) {
  AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
  return Address(reg, kDummy32BitOffset, fixup);
}

void CodeGeneratorX86::Load32BitValue(Register dest, int32_t value) {
  if (value == 0) {
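    // Clearing via xorl has a shorter encoding than movl with a zero immediate
    // and breaks the dependency on the previous value of the register.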
    __ xorl(dest, dest);
  } else {
    __ movl(dest, Immediate(value));
  }
}

void CodeGeneratorX86::Compare32BitValue(Register dest, int32_t value) {
  if (value == 0) {
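    // testl of a register with itself sets the flags the same way a comparison
    // against zero would, with a shorter encoding than cmpl with an immediate.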
    __ testl(dest, dest);
  } else {
    __ cmpl(dest, Immediate(value));
  }
}

void CodeGeneratorX86::GenerateIntCompare(Location lhs, Location rhs) {
  Register lhs_reg = lhs.AsRegister<Register>();
  GenerateIntCompare(lhs_reg, rhs);
}

void CodeGeneratorX86::GenerateIntCompare(Register lhs, Location rhs) {
  if (rhs.IsConstant()) {
    int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
    Compare32BitValue(lhs, value);
  } else if (rhs.IsStackSlot()) {
    __ cmpl(lhs, Address(ESP, rhs.GetStackIndex()));
  } else {
    __ cmpl(lhs, rhs.AsRegister<Register>());
  }
}

Address CodeGeneratorX86::ArrayAddress(Register obj,
                                       Location index,
                                       ScaleFactor scale,
                                       uint32_t data_offset) {
  return index.IsConstant() ?
      Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
      Address(obj, index.AsRegister<Register>(), scale, data_offset);
}

Address CodeGeneratorX86::LiteralCaseTable(HX86PackedSwitch* switch_instr,
                                           Register reg,
                                           Register value) {
  // Create a fixup to be used to create and address the jump table.
  JumpTableRIPFixup* table_fixup =
      new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);

  // We have to populate the jump tables.
  fixups_to_jump_tables_.push_back(table_fixup);

  // We want a scaled address, as we are extracting the correct offset from the table.
  return Address(reg, value, TIMES_4, kDummy32BitOffset, table_fixup);
}

// TODO: target as memory.
void CodeGeneratorX86::MoveFromReturnRegister(Location target, Primitive::Type type) {
  if (!target.IsValid()) {
    DCHECK_EQ(type, Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  Location return_loc = InvokeDexCallingConventionVisitorX86().GetReturnLocation(type);
  if (target.Equals(return_loc)) {
    return;
  }

  // TODO: Consider pairs in the parallel move resolver, then this could be nicely merged
  // with the else branch.
  if (type == Primitive::kPrimLong) {
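    // A long is returned in a register pair (EAX:EDX) on x86-32, so the value
    // is moved as two independent 32-bit halves.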
    HParallelMove parallel_move(GetGraph()->GetArena());
    parallel_move.AddMove(return_loc.ToLow(), target.ToLow(), Primitive::kPrimInt, nullptr);
    parallel_move.AddMove(return_loc.ToHigh(), target.ToHigh(), Primitive::kPrimInt, nullptr);
    GetMoveResolver()->EmitNativeCode(&parallel_move);
  } else {
    // Let the parallel move resolver take care of all of this.
    HParallelMove parallel_move(GetGraph()->GetArena());
    parallel_move.AddMove(return_loc, target, type, nullptr);
    GetMoveResolver()->EmitNativeCode(&parallel_move);
  }
}

#undef __

}  // namespace x86
}  // namespace art