/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/managed_register_x86.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86 {

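// The ArtMethod* argument arrives in EAX and, when the method is required, is spilled to
// the bottom of the frame (offset 0); see GenerateFrameEntry below.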
static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = EAX;
static constexpr Register kCoreCalleeSaves[] = { EBP, ESI, EDI };

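// Bit 10 (the C2 flag) of the x87 FPU status word; fprem sets it while the partial
// remainder computation is still incomplete.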
static constexpr int kC2ConditionMask = 0x400;

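// Pseudo-register (one past the last real core register) recorded in the core spill mask
// to account for the return address pushed by the call; see the constructor below.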
static constexpr int kFakeReturnRegister = Register(8);

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, x).Int32Value()

class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};

class DivZeroCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    x86_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86);
};

class DivRemMinusOneSlowPathX86 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86(HInstruction* instruction, Register reg, bool is_div)
      : SlowPathCode(instruction), reg_(reg), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (is_div_) {
      __ negl(reg_);
    } else {
      __ movl(reg_, Immediate(0));
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86"; }

 private:
  Register reg_;
  bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86);
};

class BoundsCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86(HBoundsCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }

    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<Register>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<Register>(), array_len);
      if (mirror::kUseStringCompression) {
        __ shrl(length_loc.AsRegister<Register>(), Immediate(1));
      }
    }
    x86_codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
};

class SuspendCheckSlowPathX86 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    x86_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
};

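// Slow path resolving a String through the runtime and storing the result to the
// corresponding .bss entry.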
class LoadStringSlowPathX86 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86(HLoadString* instruction): SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(calling_convention.GetRegisterAt(0), Immediate(string_index.index_));
    x86_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    Register method_address = locations->InAt(0).AsRegister<Register>();
    __ movl(Address(method_address, CodeGeneratorX86::kDummy32BitOffset),
            locations->Out().AsRegister<Register>());
    Label* fixup_label = x86_codegen->NewStringBssEntryPatch(instruction_->AsLoadString());
    __ Bind(fixup_label);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
};

class LoadClassSlowPathX86 : public SlowPathCode {
 public:
  LoadClassSlowPathX86(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : SlowPathCode(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ movl(calling_convention.GetRegisterAt(0), Immediate(type_index.index_));
    x86_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage
                                          : kQuickInitializeType,
                               instruction_,
                               dex_pc_,
                               this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_codegen->Move32(out, Location::RegisterLocation(EAX));
    }
    RestoreLiveRegisters(codegen, locations);
    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
      DCHECK(out.IsValid());
      Register method_address = locations->InAt(0).AsRegister<Register>();
      __ movl(Address(method_address, CodeGeneratorX86::kDummy32BitOffset),
              locations->Out().AsRegister<Register>());
      Label* fixup_label = x86_codegen->NewTypeBssEntryPatch(cls_);
      __ Bind(fixup_label);
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86);
};

class TypeCheckSlowPathX86 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->EmitParallelMoves(locations->InAt(0),
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   Primitive::kPrimNot,
                                   locations->InAt(1),
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                                   Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      x86_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_codegen->InvokeRuntime(kQuickCheckInstanceOf,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
      }
      RestoreLiveRegisters(codegen, locations);

      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
};

class DeoptimizationSlowPathX86 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    x86_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86);
};

class ArraySetSlowPathX86 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86(HInstruction* instruction,
                             Location ref,
                             bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    DCHECK_NE(ref_reg, ESP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in EAX):
    //
    //   EAX <- ref
    //   EAX <- ReadBarrierMark(EAX)
    //   ref <- EAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathX86(HInstruction* instruction,
                                           Location ref,
                                           Register obj,
                                           const Address& field_addr,
                                           bool unpoison_ref_before_marking,
                                           Register temp)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkAndUpdateFieldSlowPathX86"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    DCHECK_NE(ref_reg, ESP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in EAX):
    //
    //   EAX <- ref
    //   EAX <- ReadBarrierMark(EAX)
    //   ref <- EAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp_, ref_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if a mutator
    // updates the field before us, but that is OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save EAX beforehand, and move the
    // expected value (stored in `temp_`) into EAX.
    __ pushl(EAX);
    __ movl(EAX, temp_);

    // Convenience aliases.
    Register base = obj_;
    Register expected = EAX;
    Register value = ref_reg;

    bool base_equals_value = (base == value);
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value` to a temporary register. This way, poisoning
        // `value` won't invalidate `base`.
        value = temp_;
        __ movl(value, base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (EAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value, expected);
      DCHECK_NE(base, expected);

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(value);
    }

    __ LockCmpxchgl(field_addr_, value);

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(value);
      }
      // No need to unpoison `expected` (EAX), as it is overwritten below.
    }

    // Restore EAX.
    __ popl(EAX);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  const Register temp_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86(HInstruction* instruction,
                                         Location out,
                                         Location ref,
                                         Location obj,
                                         uint32_t offset,
                                         Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86::X86Assembler::shll and
          // art::x86::X86Assembler::AddImmediate below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ movl(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(index_reg, Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(index_reg, Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(calling_convention.GetRegisterAt(2), Immediate(offset_));
    }
    x86_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathX86"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(GetAssembler())->  // NOLINT

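// Maps a signed HIR condition to the matching x86 condition code.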
inline Condition X86Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps signed condition to unsigned condition and FP condition to x86 name.
inline Condition X86UnsignedOrFPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    // Signed to unsigned, and FP to x86 name.
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    // Unsigned remain unchanged.
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << XmmRegister(reg);
}

size_t CodeGeneratorX86::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movl(Address(ESP, stack_index), static_cast<Register>(reg_id));
  return kX86WordSize;
}

size_t CodeGeneratorX86::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movl(static_cast<Register>(reg_id), Address(ESP, stack_index));
  return kX86WordSize;
}

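// XMM registers are spilled and restored as 64-bit values (movsd), matching
// GetFloatingPointSpillSlotSize().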
size_t CodeGeneratorX86::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(Address(ESP, stack_index), XmmRegister(reg_id));
  return GetFloatingPointSpillSlotSize();
}

size_t CodeGeneratorX86::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ movsd(XmmRegister(reg_id), Address(ESP, stack_index));
  return GetFloatingPointSpillSlotSize();
}

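// Calls a quick runtime entrypoint and records the PC info (stack map) when the
// entrypoint requires one.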
void CodeGeneratorX86::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                     HInstruction* instruction,
                                     uint32_t dex_pc,
                                     SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}

void CodeGeneratorX86::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                           HInstruction* instruction,
                                                           SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}

void CodeGeneratorX86::GenerateInvokeRuntime(int32_t entry_point_offset) {
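  // Entrypoints are stored at fixed offsets in the Thread object; on x86 the current
  // Thread is addressed through the FS segment, so the call target is fs:[entry_point_offset].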
  __ fs()->call(Address::Absolute(entry_point_offset));
}

CodeGeneratorX86::CodeGeneratorX86(HGraph* graph,
                                   const X86InstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options,
                                   OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfXmmRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    0,
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      constant_area_start_(-1),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_address_offset_(-1) {
  // Use a fake return address register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}

void CodeGeneratorX86::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[ESP] = true;
}

InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86Core(static_cast<int>(reg));
}

void CodeGeneratorX86::GenerateFrameEntry() {
  __ cfi().SetCurrentCFAOffset(kX86WordSize);  // return address
  __ Bind(&frame_entry_label_);
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
  DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());

  if (!skip_overflow_check) {
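    // Implicit stack overflow check: touch the stack just below the reserved region so
    // that a fault is raised (and handled as a StackOverflowError) if the frame would not fit.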
Nicolas Geoffray397f2e42014-07-23 12:57:19 +01001052 __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001053 RecordPcInfo(nullptr, 0);
Nicolas Geoffray397f2e42014-07-23 12:57:19 +01001054 }
1055
Mark Mendell5f874182015-03-04 15:42:45 -05001056 if (HasEmptyFrame()) {
1057 return;
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001058 }
Mark Mendell5f874182015-03-04 15:42:45 -05001059
1060 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
1061 Register reg = kCoreCalleeSaves[i];
1062 if (allocated_registers_.ContainsCoreRegister(reg)) {
1063 __ pushl(reg);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001064 __ cfi().AdjustCFAOffset(kX86WordSize);
1065 __ cfi().RelOffset(DWARFReg(reg), 0);
Mark Mendell5f874182015-03-04 15:42:45 -05001066 }
1067 }
1068
Mingyao Yang063fc772016-08-02 11:02:54 -07001069 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1070 // Initialize should_deoptimize flag to 0.
1071 __ movl(Address(ESP, -kShouldDeoptimizeFlagSize), Immediate(0));
1072 }
1073
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001074 int adjust = GetFrameSize() - FrameEntrySpillSize();
1075 __ subl(ESP, Immediate(adjust));
1076 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001077 // Save the current method if we need it. Note that we do not
1078 // do this in HCurrentMethod, as the instruction might have been removed
1079 // in the SSA graph.
1080 if (RequiresCurrentMethod()) {
1081 __ movl(Address(ESP, kCurrentMethodStackOffset), kMethodRegisterArgument);
1082 }
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001083}
1084
1085void CodeGeneratorX86::GenerateFrameExit() {
David Srbeckyc34dc932015-04-12 09:27:43 +01001086 __ cfi().RememberState();
1087 if (!HasEmptyFrame()) {
1088 int adjust = GetFrameSize() - FrameEntrySpillSize();
1089 __ addl(ESP, Immediate(adjust));
1090 __ cfi().AdjustCFAOffset(-adjust);
Mark Mendell5f874182015-03-04 15:42:45 -05001091
David Srbeckyc34dc932015-04-12 09:27:43 +01001092 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1093 Register reg = kCoreCalleeSaves[i];
1094 if (allocated_registers_.ContainsCoreRegister(reg)) {
1095 __ popl(reg);
1096 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86WordSize));
1097 __ cfi().Restore(DWARFReg(reg));
1098 }
Mark Mendell5f874182015-03-04 15:42:45 -05001099 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001100 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001101 __ ret();
1102 __ cfi().RestoreState();
1103 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001104}
1105
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001106void CodeGeneratorX86::Bind(HBasicBlock* block) {
1107 __ Bind(GetLabelOf(block));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001108}
1109
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001110Location InvokeDexCallingConventionVisitorX86::GetReturnLocation(Primitive::Type type) const {
1111 switch (type) {
1112 case Primitive::kPrimBoolean:
1113 case Primitive::kPrimByte:
1114 case Primitive::kPrimChar:
1115 case Primitive::kPrimShort:
1116 case Primitive::kPrimInt:
1117 case Primitive::kPrimNot:
1118 return Location::RegisterLocation(EAX);
1119
1120 case Primitive::kPrimLong:
1121 return Location::RegisterPairLocation(EAX, EDX);
1122
1123 case Primitive::kPrimVoid:
1124 return Location::NoLocation();
1125
1126 case Primitive::kPrimDouble:
1127 case Primitive::kPrimFloat:
1128 return Location::FpuRegisterLocation(XMM0);
1129 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01001130
1131 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001132}
1133
1134Location InvokeDexCallingConventionVisitorX86::GetMethodLocation() const {
1135 return Location::RegisterLocation(kMethodRegisterArgument);
1136}
1137
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001138Location InvokeDexCallingConventionVisitorX86::GetNextLocation(Primitive::Type type) {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001139 switch (type) {
1140 case Primitive::kPrimBoolean:
1141 case Primitive::kPrimByte:
1142 case Primitive::kPrimChar:
1143 case Primitive::kPrimShort:
1144 case Primitive::kPrimInt:
1145 case Primitive::kPrimNot: {
1146 uint32_t index = gp_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001147 stack_index_++;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001148 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001149 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001150 } else {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001151 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001152 }
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001153 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001154
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001155 case Primitive::kPrimLong: {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001156 uint32_t index = gp_index_;
1157 gp_index_ += 2;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001158 stack_index_ += 2;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001159 if (index + 1 < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001160 X86ManagedRegister pair = X86ManagedRegister::FromRegisterPair(
1161 calling_convention.GetRegisterPairAt(index));
1162 return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001163 } else {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001164 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
1165 }
1166 }
1167
1168 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001169 uint32_t index = float_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001170 stack_index_++;
1171 if (index < calling_convention.GetNumberOfFpuRegisters()) {
1172 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
1173 } else {
1174 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
1175 }
1176 }
1177
1178 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001179 uint32_t index = float_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001180 stack_index_ += 2;
1181 if (index < calling_convention.GetNumberOfFpuRegisters()) {
1182 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
1183 } else {
1184 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001185 }
1186 }
1187
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001188 case Primitive::kPrimVoid:
1189 LOG(FATAL) << "Unexpected parameter type " << type;
1190 break;
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001191 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00001192 return Location::NoLocation();
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001193}
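// Example of the bookkeeping in GetNextLocation above: for a signature
// (int, long, double), the int consumes gp_index_ 0 and one stack slot, the
// long consumes gp_index_ 1 and 2 plus two stack slots (it only gets a
// register pair if both indices fit in the convention's register count), and
// the double consumes float_index_ 0 plus two more stack slots. Arguments that
// do not fit in registers are addressed through GetStackOffsetOf() on the
// caller-reserved area.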
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001194
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001195void CodeGeneratorX86::Move32(Location destination, Location source) {
1196 if (source.Equals(destination)) {
1197 return;
1198 }
1199 if (destination.IsRegister()) {
1200 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001201 __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001202 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001203 __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001204 } else {
1205 DCHECK(source.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001206 __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001207 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001208 } else if (destination.IsFpuRegister()) {
1209 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001210 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001211 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001212 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001213 } else {
1214 DCHECK(source.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001215 __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001216 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001217 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001218 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001219 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001220 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001221 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001222 __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Mark Mendell7c8d0092015-01-26 11:21:33 -05001223 } else if (source.IsConstant()) {
1224 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001225 int32_t value = GetInt32ValueOf(constant);
Mark Mendell7c8d0092015-01-26 11:21:33 -05001226 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001227 } else {
1228 DCHECK(source.IsStackSlot());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001229 __ pushl(Address(ESP, source.GetStackIndex()));
1230 __ popl(Address(ESP, destination.GetStackIndex()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001231 }
1232 }
1233}
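// Note on the last case of Move32 above: a stack-slot to stack-slot move is
// done with a pushl/popl pair, using the stack itself as the temporary so
// that no scratch register is needed.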
1234
1235void CodeGeneratorX86::Move64(Location destination, Location source) {
1236 if (source.Equals(destination)) {
1237 return;
1238 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001239 if (destination.IsRegisterPair()) {
1240 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001241 EmitParallelMoves(
1242 Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
1243 Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001244 Primitive::kPrimInt,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001245 Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001246 Location::RegisterLocation(destination.AsRegisterPairLow<Register>()),
1247 Primitive::kPrimInt);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001248 } else if (source.IsFpuRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001249 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
1250 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
1251 __ psrlq(src_reg, Immediate(32));
1252 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001253 } else {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001254 // No conflict possible, so just do the moves.
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001255 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001256 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
1257 __ movl(destination.AsRegisterPairHigh<Register>(),
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001258 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
1259 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001260 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05001261 if (source.IsFpuRegister()) {
1262 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
1263 } else if (source.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001264 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001265 } else if (source.IsRegisterPair()) {
1266 size_t elem_size = Primitive::ComponentSize(Primitive::kPrimInt);
1267 // Create stack space for 2 elements.
1268 __ subl(ESP, Immediate(2 * elem_size));
1269 __ movl(Address(ESP, 0), source.AsRegisterPairLow<Register>());
1270 __ movl(Address(ESP, elem_size), source.AsRegisterPairHigh<Register>());
1271 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
1272 // And remove the temporary stack space we allocated.
1273 __ addl(ESP, Immediate(2 * elem_size));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001274 } else {
1275 LOG(FATAL) << "Unimplemented";
1276 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001277 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001278 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001279 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001280 // No conflict possible, so just do the moves.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001281 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001282 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001283 source.AsRegisterPairHigh<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001284 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001285 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001286 } else if (source.IsConstant()) {
1287 HConstant* constant = source.GetConstant();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001288 DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
1289 int64_t value = GetInt64ValueOf(constant);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001290 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(Low32Bits(value)));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001291 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
1292 Immediate(High32Bits(value)));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001293 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001294 DCHECK(source.IsDoubleStackSlot()) << source;
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001295 EmitParallelMoves(
1296 Location::StackSlot(source.GetStackIndex()),
1297 Location::StackSlot(destination.GetStackIndex()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001298 Primitive::kPrimInt,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001299 Location::StackSlot(source.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001300 Location::StackSlot(destination.GetHighStackIndex(kX86WordSize)),
1301 Primitive::kPrimInt);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001302 }
1303 }
1304}
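// The XMM <-> register-pair cases of Move64 above have no single x86-32
// instruction: XMM to pair extracts the low word with movd, shifts the XMM
// right by 32 and extracts the high word with a second movd; pair to XMM
// stores both words to a temporary two-word stack area and reloads them with
// one movsd.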
1305
Calin Juravle175dc732015-08-25 15:42:32 +01001306void CodeGeneratorX86::MoveConstant(Location location, int32_t value) {
1307 DCHECK(location.IsRegister());
1308 __ movl(location.AsRegister<Register>(), Immediate(value));
1309}
1310
Calin Juravlee460d1d2015-09-29 04:52:17 +01001311void CodeGeneratorX86::MoveLocation(Location dst, Location src, Primitive::Type dst_type) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001312 HParallelMove move(GetGraph()->GetArena());
1313 if (dst_type == Primitive::kPrimLong && !src.IsConstant() && !src.IsFpuRegister()) {
1314 move.AddMove(src.ToLow(), dst.ToLow(), Primitive::kPrimInt, nullptr);
1315 move.AddMove(src.ToHigh(), dst.ToHigh(), Primitive::kPrimInt, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001316 } else {
David Brazdil74eb1b22015-12-14 11:44:01 +00001317 move.AddMove(src, dst, dst_type, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001318 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001319 GetMoveResolver()->EmitNativeCode(&move);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001320}
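// MoveLocation wraps the request in an HParallelMove and defers to the move
// resolver; a 64-bit move whose source is neither a constant nor an XMM
// register is split into independent low and high 32-bit moves so the
// resolver can sequence the two halves.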
1321
1322void CodeGeneratorX86::AddLocationAsTemp(Location location, LocationSummary* locations) {
1323 if (location.IsRegister()) {
1324 locations->AddTemp(location);
1325 } else if (location.IsRegisterPair()) {
1326 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1327 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
1328 } else {
1329 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1330 }
1331}
1332
David Brazdilfc6a86a2015-06-26 10:33:45 +00001333void InstructionCodeGeneratorX86::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001334 DCHECK(!successor->IsExitBlock());
1335
1336 HBasicBlock* block = got->GetBlock();
1337 HInstruction* previous = got->GetPrevious();
1338
1339 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001340 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001341 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1342 return;
1343 }
1344
1345 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1346 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1347 }
1348 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00001349 __ jmp(codegen_->GetLabelOf(successor));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001350 }
1351}
1352
David Brazdilfc6a86a2015-06-26 10:33:45 +00001353void LocationsBuilderX86::VisitGoto(HGoto* got) {
1354 got->SetLocations(nullptr);
1355}
1356
1357void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
1358 HandleGoto(got, got->GetSuccessor());
1359}
1360
1361void LocationsBuilderX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1362 try_boundary->SetLocations(nullptr);
1363}
1364
1365void InstructionCodeGeneratorX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1366 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1367 if (!successor->IsExitBlock()) {
1368 HandleGoto(try_boundary, successor);
1369 }
1370}
1371
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001372void LocationsBuilderX86::VisitExit(HExit* exit) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00001373 exit->SetLocations(nullptr);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001374}
1375
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001376void InstructionCodeGeneratorX86::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001377}
1378
Mark Mendell152408f2015-12-31 12:28:50 -05001379template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001380void InstructionCodeGeneratorX86::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001381 LabelType* true_label,
1382 LabelType* false_label) {
Roland Levillain4fa13f62015-07-06 18:11:54 +01001383 if (cond->IsFPConditionTrueIfNaN()) {
1384 __ j(kUnordered, true_label);
1385 } else if (cond->IsFPConditionFalseIfNaN()) {
1386 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001387 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001388 __ j(X86UnsignedOrFPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001389}
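// NaN handling in GenerateFPJumps above: ucomiss/ucomisd (emitted just before
// by GenerateFPCompare) report an unordered result when either input is NaN.
// The HIR condition records whether NaN must evaluate to true or false, so
// the unordered case is routed to the matching label first and the ordered
// condition is tested afterwards.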
1390
Mark Mendell152408f2015-12-31 12:28:50 -05001391template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001392void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001393 LabelType* true_label,
1394 LabelType* false_label) {
Mark Mendellc4701932015-04-10 13:18:51 -04001395 LocationSummary* locations = cond->GetLocations();
1396 Location left = locations->InAt(0);
1397 Location right = locations->InAt(1);
1398 IfCondition if_cond = cond->GetCondition();
1399
Mark Mendellc4701932015-04-10 13:18:51 -04001400 Register left_high = left.AsRegisterPairHigh<Register>();
Roland Levillain4fa13f62015-07-06 18:11:54 +01001401 Register left_low = left.AsRegisterPairLow<Register>();
Mark Mendellc4701932015-04-10 13:18:51 -04001402 IfCondition true_high_cond = if_cond;
1403 IfCondition false_high_cond = cond->GetOppositeCondition();
Aart Bike9f37602015-10-09 11:15:55 -07001404 Condition final_condition = X86UnsignedOrFPCondition(if_cond); // unsigned on lower part
Mark Mendellc4701932015-04-10 13:18:51 -04001405
1406 // Set the conditions for the test, remembering that == needs to be
1407 // decided using the low words.
1408 switch (if_cond) {
1409 case kCondEQ:
Mark Mendellc4701932015-04-10 13:18:51 -04001410 case kCondNE:
Roland Levillain4fa13f62015-07-06 18:11:54 +01001411 // Nothing to do.
Mark Mendellc4701932015-04-10 13:18:51 -04001412 break;
1413 case kCondLT:
1414 false_high_cond = kCondGT;
Mark Mendellc4701932015-04-10 13:18:51 -04001415 break;
1416 case kCondLE:
1417 true_high_cond = kCondLT;
Mark Mendellc4701932015-04-10 13:18:51 -04001418 break;
1419 case kCondGT:
1420 false_high_cond = kCondLT;
Mark Mendellc4701932015-04-10 13:18:51 -04001421 break;
1422 case kCondGE:
1423 true_high_cond = kCondGT;
Mark Mendellc4701932015-04-10 13:18:51 -04001424 break;
Aart Bike9f37602015-10-09 11:15:55 -07001425 case kCondB:
1426 false_high_cond = kCondA;
1427 break;
1428 case kCondBE:
1429 true_high_cond = kCondB;
1430 break;
1431 case kCondA:
1432 false_high_cond = kCondB;
1433 break;
1434 case kCondAE:
1435 true_high_cond = kCondA;
1436 break;
Mark Mendellc4701932015-04-10 13:18:51 -04001437 }
1438
1439 if (right.IsConstant()) {
1440 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellc4701932015-04-10 13:18:51 -04001441 int32_t val_high = High32Bits(value);
Roland Levillain4fa13f62015-07-06 18:11:54 +01001442 int32_t val_low = Low32Bits(value);
Mark Mendellc4701932015-04-10 13:18:51 -04001443
Aart Bika19616e2016-02-01 18:57:58 -08001444 codegen_->Compare32BitValue(left_high, val_high);
Mark Mendellc4701932015-04-10 13:18:51 -04001445 if (if_cond == kCondNE) {
Aart Bike9f37602015-10-09 11:15:55 -07001446 __ j(X86Condition(true_high_cond), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001447 } else if (if_cond == kCondEQ) {
Aart Bike9f37602015-10-09 11:15:55 -07001448 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001449 } else {
Aart Bike9f37602015-10-09 11:15:55 -07001450 __ j(X86Condition(true_high_cond), true_label);
1451 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001452 }
1453 // The high words were equal, so decide the result by comparing the low words.
Aart Bika19616e2016-02-01 18:57:58 -08001454 codegen_->Compare32BitValue(left_low, val_low);
Mark Mendell8659e842016-02-16 10:41:46 -05001455 } else if (right.IsRegisterPair()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001456 Register right_high = right.AsRegisterPairHigh<Register>();
Roland Levillain4fa13f62015-07-06 18:11:54 +01001457 Register right_low = right.AsRegisterPairLow<Register>();
Mark Mendellc4701932015-04-10 13:18:51 -04001458
1459 __ cmpl(left_high, right_high);
1460 if (if_cond == kCondNE) {
Aart Bike9f37602015-10-09 11:15:55 -07001461 __ j(X86Condition(true_high_cond), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001462 } else if (if_cond == kCondEQ) {
Aart Bike9f37602015-10-09 11:15:55 -07001463 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001464 } else {
Aart Bike9f37602015-10-09 11:15:55 -07001465 __ j(X86Condition(true_high_cond), true_label);
1466 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001467 }
1468 // The high words were equal, so decide the result by comparing the low words.
1469 __ cmpl(left_low, right_low);
Mark Mendell8659e842016-02-16 10:41:46 -05001470 } else {
1471 DCHECK(right.IsDoubleStackSlot());
1472 __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
1473 if (if_cond == kCondNE) {
1474 __ j(X86Condition(true_high_cond), true_label);
1475 } else if (if_cond == kCondEQ) {
1476 __ j(X86Condition(false_high_cond), false_label);
1477 } else {
1478 __ j(X86Condition(true_high_cond), true_label);
1479 __ j(X86Condition(false_high_cond), false_label);
1480 }
1481 // The high words were equal, so decide the result by comparing the low words.
1482 __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
Mark Mendellc4701932015-04-10 13:18:51 -04001483 }
1484 // The last comparison might be unsigned.
1485 __ j(final_condition, true_label);
1486}
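// Example of the expansion in GenerateLongComparesAndJumps above for a signed
// 64-bit "left < right" with both operands in register pairs:
//
//   cmpl left_high, right_high
//   jl   true_label               // the high words already decide it
//   jg   false_label
//   cmpl left_low, right_low
//   jb   true_label               // equal highs: the low words compare unsigned
//   (fall through; the caller jumps to or binds false_label)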
1487
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001488void InstructionCodeGeneratorX86::GenerateFPCompare(Location lhs,
1489 Location rhs,
1490 HInstruction* insn,
1491 bool is_double) {
1492 HX86LoadFromConstantTable* const_area = insn->InputAt(1)->AsX86LoadFromConstantTable();
1493 if (is_double) {
1494 if (rhs.IsFpuRegister()) {
1495 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
1496 } else if (const_area != nullptr) {
1497 DCHECK(const_area->IsEmittedAtUseSite());
1498 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(),
1499 codegen_->LiteralDoubleAddress(
1500 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
1501 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
1502 } else {
1503 DCHECK(rhs.IsDoubleStackSlot());
1504 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
1505 }
1506 } else {
1507 if (rhs.IsFpuRegister()) {
1508 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
1509 } else if (const_area != nullptr) {
1510 DCHECK(const_area->IsEmittedAtUseSite());
1511 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(),
1512 codegen_->LiteralFloatAddress(
1513 const_area->GetConstant()->AsFloatConstant()->GetValue(),
1514 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
1515 } else {
1516 DCHECK(rhs.IsStackSlot());
1517 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
1518 }
1519 }
1520}
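// In GenerateFPCompare above, when the right-hand side is an
// HX86LoadFromConstantTable emitted at its use site, the literal is not
// materialized in an XMM register first: ucomiss/ucomisd read it directly
// from the constant area, addressed off the base register provided by the
// constant-table load (its InAt(0)).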
1521
Mark Mendell152408f2015-12-31 12:28:50 -05001522template<class LabelType>
David Brazdil0debae72015-11-12 18:37:00 +00001523void InstructionCodeGeneratorX86::GenerateCompareTestAndBranch(HCondition* condition,
Mark Mendell152408f2015-12-31 12:28:50 -05001524 LabelType* true_target_in,
1525 LabelType* false_target_in) {
David Brazdil0debae72015-11-12 18:37:00 +00001526 // Generated branching requires both targets to be explicit. If either of the
1527 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
Mark Mendell152408f2015-12-31 12:28:50 -05001528 LabelType fallthrough_target;
1529 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1530 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
David Brazdil0debae72015-11-12 18:37:00 +00001531
Mark Mendellc4701932015-04-10 13:18:51 -04001532 LocationSummary* locations = condition->GetLocations();
1533 Location left = locations->InAt(0);
1534 Location right = locations->InAt(1);
1535
Mark Mendellc4701932015-04-10 13:18:51 -04001536 Primitive::Type type = condition->InputAt(0)->GetType();
1537 switch (type) {
1538 case Primitive::kPrimLong:
1539 GenerateLongComparesAndJumps(condition, true_target, false_target);
1540 break;
1541 case Primitive::kPrimFloat:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001542 GenerateFPCompare(left, right, condition, false);
Mark Mendellc4701932015-04-10 13:18:51 -04001543 GenerateFPJumps(condition, true_target, false_target);
1544 break;
1545 case Primitive::kPrimDouble:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001546 GenerateFPCompare(left, right, condition, true);
Mark Mendellc4701932015-04-10 13:18:51 -04001547 GenerateFPJumps(condition, true_target, false_target);
1548 break;
1549 default:
1550 LOG(FATAL) << "Unexpected compare type " << type;
1551 }
1552
David Brazdil0debae72015-11-12 18:37:00 +00001553 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001554 __ jmp(false_target);
1555 }
David Brazdil0debae72015-11-12 18:37:00 +00001556
1557 if (fallthrough_target.IsLinked()) {
1558 __ Bind(&fallthrough_target);
1559 }
Mark Mendellc4701932015-04-10 13:18:51 -04001560}
1561
David Brazdil0debae72015-11-12 18:37:00 +00001562static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1563 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1564 // are set only strictly before `branch`. We can't use the eflags on long/FP
1565 // conditions if they are materialized due to the complex branching.
1566 return cond->IsCondition() &&
1567 cond->GetNext() == branch &&
1568 cond->InputAt(0)->GetType() != Primitive::kPrimLong &&
1569 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1570}
1571
Mark Mendell152408f2015-12-31 12:28:50 -05001572template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001573void InstructionCodeGeneratorX86::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001574 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001575 LabelType* true_target,
1576 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001577 HInstruction* cond = instruction->InputAt(condition_input_index);
1578
1579 if (true_target == nullptr && false_target == nullptr) {
1580 // Nothing to do. The code always falls through.
1581 return;
1582 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001583 // Constant condition, statically compared against "true" (integer value 1).
1584 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001585 if (true_target != nullptr) {
1586 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001587 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001588 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001589 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001590 if (false_target != nullptr) {
1591 __ jmp(false_target);
1592 }
1593 }
1594 return;
1595 }
1596
1597 // The following code generates these patterns:
1598 // (1) true_target == nullptr && false_target != nullptr
1599 // - opposite condition true => branch to false_target
1600 // (2) true_target != nullptr && false_target == nullptr
1601 // - condition true => branch to true_target
1602 // (3) true_target != nullptr && false_target != nullptr
1603 // - condition true => branch to true_target
1604 // - branch to false_target
1605 if (IsBooleanValueOrMaterializedCondition(cond)) {
1606 if (AreEflagsSetFrom(cond, instruction)) {
1607 if (true_target == nullptr) {
1608 __ j(X86Condition(cond->AsCondition()->GetOppositeCondition()), false_target);
1609 } else {
1610 __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
1611 }
1612 } else {
1613 // Materialized condition, compare against 0.
1614 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1615 if (lhs.IsRegister()) {
1616 __ testl(lhs.AsRegister<Register>(), lhs.AsRegister<Register>());
1617 } else {
1618 __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
1619 }
1620 if (true_target == nullptr) {
1621 __ j(kEqual, false_target);
1622 } else {
1623 __ j(kNotEqual, true_target);
1624 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001625 }
1626 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001627 // Condition has not been materialized, use its inputs as the comparison and
1628 // its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001629 HCondition* condition = cond->AsCondition();
David Brazdil0debae72015-11-12 18:37:00 +00001630
1631 // If this is a long or FP comparison that has been folded into
1632 // the HCondition, generate the comparison directly.
1633 Primitive::Type type = condition->InputAt(0)->GetType();
1634 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1635 GenerateCompareTestAndBranch(condition, true_target, false_target);
1636 return;
1637 }
1638
1639 Location lhs = condition->GetLocations()->InAt(0);
1640 Location rhs = condition->GetLocations()->InAt(1);
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001641 // LHS is guaranteed to be in a register (see LocationsBuilderX86::HandleCondition).
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001642 codegen_->GenerateIntCompare(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00001643 if (true_target == nullptr) {
1644 __ j(X86Condition(condition->GetOppositeCondition()), false_target);
1645 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001646 __ j(X86Condition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001647 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001648 }
David Brazdil0debae72015-11-12 18:37:00 +00001649
1650 // If neither branch falls through (case 3), the conditional branch to `true_target`
1651 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1652 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001653 __ jmp(false_target);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001654 }
1655}
1656
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001657void LocationsBuilderX86::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001658 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1659 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001660 locations->SetInAt(0, Location::Any());
1661 }
1662}
1663
1664void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001665 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1666 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1667 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1668 nullptr : codegen_->GetLabelOf(true_successor);
1669 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1670 nullptr : codegen_->GetLabelOf(false_successor);
1671 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001672}
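// Example for VisitIf above: when the "true" successor is the block emitted
// next, true_target is passed as nullptr and GenerateTestAndBranch only emits
// a branch on the opposite condition to the "false" block, letting the taken
// path fall through.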
1673
1674void LocationsBuilderX86::VisitDeoptimize(HDeoptimize* deoptimize) {
1675 LocationSummary* locations = new (GetGraph()->GetArena())
1676 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01001677 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00001678 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001679 locations->SetInAt(0, Location::Any());
1680 }
1681}
1682
1683void InstructionCodeGeneratorX86::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001684 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001685 GenerateTestAndBranch<Label>(deoptimize,
1686 /* condition_input_index */ 0,
1687 slow_path->GetEntryLabel(),
1688 /* false_target */ nullptr);
1689}
1690
Mingyao Yang063fc772016-08-02 11:02:54 -07001691void LocationsBuilderX86::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1692 LocationSummary* locations = new (GetGraph()->GetArena())
1693 LocationSummary(flag, LocationSummary::kNoCall);
1694 locations->SetOut(Location::RequiresRegister());
1695}
1696
1697void InstructionCodeGeneratorX86::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1698 __ movl(flag->GetLocations()->Out().AsRegister<Register>(),
1699 Address(ESP, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
1700}
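// VisitShouldDeoptimizeFlag above reads back the per-frame flag that
// GenerateFrameEntry zero-initialized; the runtime can later set it to
// request that this frame deoptimize, and the flag's users branch on the
// value loaded here.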
1701
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001702static bool SelectCanUseCMOV(HSelect* select) {
1703 // There are no conditional move instructions for XMMs.
1704 if (Primitive::IsFloatingPointType(select->GetType())) {
1705 return false;
1706 }
1707
1708 // A FP condition doesn't generate the single CC that we need.
1709 // In 32 bit mode, a long condition doesn't generate a single CC either.
1710 HInstruction* condition = select->GetCondition();
1711 if (condition->IsCondition()) {
1712 Primitive::Type compare_type = condition->InputAt(0)->GetType();
1713 if (compare_type == Primitive::kPrimLong ||
1714 Primitive::IsFloatingPointType(compare_type)) {
1715 return false;
1716 }
1717 }
1718
1719 // We can generate a CMOV for this Select.
1720 return true;
1721}
1722
David Brazdil74eb1b22015-12-14 11:44:01 +00001723void LocationsBuilderX86::VisitSelect(HSelect* select) {
1724 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001725 if (Primitive::IsFloatingPointType(select->GetType())) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001726 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001727 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001728 } else {
1729 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001730 if (SelectCanUseCMOV(select)) {
1731 if (select->InputAt(1)->IsConstant()) {
1732 // Cmov can't handle a constant value.
1733 locations->SetInAt(1, Location::RequiresRegister());
1734 } else {
1735 locations->SetInAt(1, Location::Any());
1736 }
1737 } else {
1738 locations->SetInAt(1, Location::Any());
1739 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001740 }
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001741 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1742 locations->SetInAt(2, Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00001743 }
1744 locations->SetOut(Location::SameAsFirstInput());
1745}
1746
1747void InstructionCodeGeneratorX86::VisitSelect(HSelect* select) {
1748 LocationSummary* locations = select->GetLocations();
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001749 DCHECK(locations->InAt(0).Equals(locations->Out()));
1750 if (SelectCanUseCMOV(select)) {
1751 // If both the condition and the source types are integer, we can generate
1752 // a CMOV to implement Select.
1753
1754 HInstruction* select_condition = select->GetCondition();
1755 Condition cond = kNotEqual;
1756
1757 // Figure out how to test the 'condition'.
1758 if (select_condition->IsCondition()) {
1759 HCondition* condition = select_condition->AsCondition();
1760 if (!condition->IsEmittedAtUseSite()) {
1761 // This was a previously materialized condition.
1762 // Can we use the existing condition code?
1763 if (AreEflagsSetFrom(condition, select)) {
1764 // Materialization was the previous instruction. Condition codes are right.
1765 cond = X86Condition(condition->GetCondition());
1766 } else {
1767 // No, we have to recreate the condition code.
1768 Register cond_reg = locations->InAt(2).AsRegister<Register>();
1769 __ testl(cond_reg, cond_reg);
1770 }
1771 } else {
1772 // We can't handle FP or long here.
1773 DCHECK_NE(condition->InputAt(0)->GetType(), Primitive::kPrimLong);
1774 DCHECK(!Primitive::IsFloatingPointType(condition->InputAt(0)->GetType()));
1775 LocationSummary* cond_locations = condition->GetLocations();
Roland Levillain0b671c02016-08-19 12:02:34 +01001776 codegen_->GenerateIntCompare(cond_locations->InAt(0), cond_locations->InAt(1));
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001777 cond = X86Condition(condition->GetCondition());
1778 }
1779 } else {
1780 // Must be a boolean condition, which needs to be compared to 0.
1781 Register cond_reg = locations->InAt(2).AsRegister<Register>();
1782 __ testl(cond_reg, cond_reg);
1783 }
1784
1785 // If the condition is true, overwrite the output, which already contains false.
1786 Location false_loc = locations->InAt(0);
1787 Location true_loc = locations->InAt(1);
1788 if (select->GetType() == Primitive::kPrimLong) {
1789 // 64 bit conditional move.
1790 Register false_high = false_loc.AsRegisterPairHigh<Register>();
1791 Register false_low = false_loc.AsRegisterPairLow<Register>();
1792 if (true_loc.IsRegisterPair()) {
1793 __ cmovl(cond, false_high, true_loc.AsRegisterPairHigh<Register>());
1794 __ cmovl(cond, false_low, true_loc.AsRegisterPairLow<Register>());
1795 } else {
1796 __ cmovl(cond, false_high, Address(ESP, true_loc.GetHighStackIndex(kX86WordSize)));
1797 __ cmovl(cond, false_low, Address(ESP, true_loc.GetStackIndex()));
1798 }
1799 } else {
1800 // 32 bit conditional move.
1801 Register false_reg = false_loc.AsRegister<Register>();
1802 if (true_loc.IsRegister()) {
1803 __ cmovl(cond, false_reg, true_loc.AsRegister<Register>());
1804 } else {
1805 __ cmovl(cond, false_reg, Address(ESP, true_loc.GetStackIndex()));
1806 }
1807 }
1808 } else {
1809 NearLabel false_target;
1810 GenerateTestAndBranch<NearLabel>(
1811 select, /* condition_input_index */ 2, /* true_target */ nullptr, &false_target);
1812 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1813 __ Bind(&false_target);
1814 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001815}
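// Example for VisitSelect above: for "x = cond ? a : b" with an integer x,
// the output register is shared with the first input and already holds b (the
// false value); when the condition holds, a single cmovl overwrites it with a.
// Long selects use two cmovl instructions, one per half, and non-CMOV cases
// fall back to a branch around the move of the true value.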
1816
David Srbecky0cf44932015-12-09 14:09:59 +00001817void LocationsBuilderX86::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1818 new (GetGraph()->GetArena()) LocationSummary(info);
1819}
1820
David Srbeckyd28f4a02016-03-14 17:14:24 +00001821void InstructionCodeGeneratorX86::VisitNativeDebugInfo(HNativeDebugInfo*) {
1822 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001823}
1824
1825void CodeGeneratorX86::GenerateNop() {
1826 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001827}
1828
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001829void LocationsBuilderX86::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001830 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001831 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001832 // Handle the long/FP comparisons made in instruction simplification.
1833 switch (cond->InputAt(0)->GetType()) {
1834 case Primitive::kPrimLong: {
1835 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell8659e842016-02-16 10:41:46 -05001836 locations->SetInAt(1, Location::Any());
David Brazdilb3e773e2016-01-26 11:28:37 +00001837 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001838 locations->SetOut(Location::RequiresRegister());
1839 }
1840 break;
1841 }
1842 case Primitive::kPrimFloat:
1843 case Primitive::kPrimDouble: {
1844 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001845 if (cond->InputAt(1)->IsX86LoadFromConstantTable()) {
1846 DCHECK(cond->InputAt(1)->IsEmittedAtUseSite());
1847 } else if (cond->InputAt(1)->IsConstant()) {
1848 locations->SetInAt(1, Location::RequiresFpuRegister());
1849 } else {
1850 locations->SetInAt(1, Location::Any());
1851 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001852 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001853 locations->SetOut(Location::RequiresRegister());
1854 }
1855 break;
1856 }
1857 default:
1858 locations->SetInAt(0, Location::RequiresRegister());
1859 locations->SetInAt(1, Location::Any());
David Brazdilb3e773e2016-01-26 11:28:37 +00001860 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001861 // We need a byte register.
1862 locations->SetOut(Location::RegisterLocation(ECX));
1863 }
1864 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001865 }
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00001866}
1867
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001868void InstructionCodeGeneratorX86::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001869 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001870 return;
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001871 }
Mark Mendellc4701932015-04-10 13:18:51 -04001872
1873 LocationSummary* locations = cond->GetLocations();
1874 Location lhs = locations->InAt(0);
1875 Location rhs = locations->InAt(1);
1876 Register reg = locations->Out().AsRegister<Register>();
Mark Mendell152408f2015-12-31 12:28:50 -05001877 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001878
1879 switch (cond->InputAt(0)->GetType()) {
1880 default: {
1881 // Integer case.
1882
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01001883 // Clear output register: setb only sets the low byte.
Mark Mendellc4701932015-04-10 13:18:51 -04001884 __ xorl(reg, reg);
Roland Levillain0b671c02016-08-19 12:02:34 +01001885 codegen_->GenerateIntCompare(lhs, rhs);
Aart Bike9f37602015-10-09 11:15:55 -07001886 __ setb(X86Condition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001887 return;
1888 }
1889 case Primitive::kPrimLong:
1890 GenerateLongComparesAndJumps(cond, &true_label, &false_label);
1891 break;
1892 case Primitive::kPrimFloat:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001893 GenerateFPCompare(lhs, rhs, cond, false);
Mark Mendellc4701932015-04-10 13:18:51 -04001894 GenerateFPJumps(cond, &true_label, &false_label);
1895 break;
1896 case Primitive::kPrimDouble:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001897 GenerateFPCompare(lhs, rhs, cond, true);
Mark Mendellc4701932015-04-10 13:18:51 -04001898 GenerateFPJumps(cond, &true_label, &false_label);
1899 break;
1900 }
1901
1902 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001903 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001904
Roland Levillain4fa13f62015-07-06 18:11:54 +01001905 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001906 __ Bind(&false_label);
1907 __ xorl(reg, reg);
1908 __ jmp(&done_label);
1909
Roland Levillain4fa13f62015-07-06 18:11:54 +01001910 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001911 __ Bind(&true_label);
1912 __ movl(reg, Immediate(1));
1913 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001914}
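// Materialization summary for HandleCondition above: integer conditions
// become xorl/cmp/setcc into the output (the locations builder pins it to ECX
// since setcc needs a byte register); long and FP conditions reuse the jump
// helpers and then write 0 or 1 through the small label diamond at the end.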
1915
1916void LocationsBuilderX86::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001917 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001918}
1919
1920void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001921 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001922}
1923
1924void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001925 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001926}
1927
1928void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001929 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001930}
1931
1932void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001933 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001934}
1935
1936void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001937 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001938}
1939
1940void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001941 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001942}
1943
1944void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001945 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001946}
1947
1948void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001949 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001950}
1951
1952void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001953 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001954}
1955
1956void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001957 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001958}
1959
1960void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001961 HandleCondition(comp);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001962}
1963
Aart Bike9f37602015-10-09 11:15:55 -07001964void LocationsBuilderX86::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001965 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001966}
1967
1968void InstructionCodeGeneratorX86::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001969 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001970}
1971
1972void LocationsBuilderX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001973 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001974}
1975
1976void InstructionCodeGeneratorX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001977 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001978}
1979
1980void LocationsBuilderX86::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001981 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001982}
1983
1984void InstructionCodeGeneratorX86::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001985 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001986}
1987
1988void LocationsBuilderX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001989 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001990}
1991
1992void InstructionCodeGeneratorX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001993 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001994}
1995
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001996void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001997 LocationSummary* locations =
1998 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01001999 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00002000}
2001
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002002void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002003 // Will be generated at use site.
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002004}
2005
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002006void LocationsBuilderX86::VisitNullConstant(HNullConstant* constant) {
2007 LocationSummary* locations =
2008 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2009 locations->SetOut(Location::ConstantLocation(constant));
2010}
2011
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002012void InstructionCodeGeneratorX86::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002013 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002014}
2015
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002016void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002017 LocationSummary* locations =
2018 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002019 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002020}
2021
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002022void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002023 // Will be generated at use site.
2024}
2025
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002026void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
2027 LocationSummary* locations =
2028 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2029 locations->SetOut(Location::ConstantLocation(constant));
2030}
2031
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002032void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002033 // Will be generated at use site.
2034}
2035
2036void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
2037 LocationSummary* locations =
2038 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2039 locations->SetOut(Location::ConstantLocation(constant));
2040}
2041
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002042void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002043 // Will be generated at use site.
2044}
2045
Calin Juravle27df7582015-04-17 19:12:31 +01002046void LocationsBuilderX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2047 memory_barrier->SetLocations(nullptr);
2048}
2049
2050void InstructionCodeGeneratorX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002051 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002052}
2053
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002054void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002055 ret->SetLocations(nullptr);
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00002056}
2057
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002058void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002059 codegen_->GenerateFrameExit();
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00002060}
2061
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002062void LocationsBuilderX86::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002063 LocationSummary* locations =
2064 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002065 switch (ret->InputAt(0)->GetType()) {
2066 case Primitive::kPrimBoolean:
2067 case Primitive::kPrimByte:
2068 case Primitive::kPrimChar:
2069 case Primitive::kPrimShort:
2070 case Primitive::kPrimInt:
2071 case Primitive::kPrimNot:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002072 locations->SetInAt(0, Location::RegisterLocation(EAX));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002073 break;
2074
2075 case Primitive::kPrimLong:
2076 locations->SetInAt(
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002077 0, Location::RegisterPairLocation(EAX, EDX));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002078 break;
2079
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002080 case Primitive::kPrimFloat:
2081 case Primitive::kPrimDouble:
2082 locations->SetInAt(
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002083 0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002084 break;
2085
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002086 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002087 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002088 }
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002089}
2090
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002091void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002092 if (kIsDebugBuild) {
2093 switch (ret->InputAt(0)->GetType()) {
2094 case Primitive::kPrimBoolean:
2095 case Primitive::kPrimByte:
2096 case Primitive::kPrimChar:
2097 case Primitive::kPrimShort:
2098 case Primitive::kPrimInt:
2099 case Primitive::kPrimNot:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002100 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<Register>(), EAX);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002101 break;
2102
2103 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002104 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), EAX);
2105 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), EDX);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002106 break;
2107
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002108 case Primitive::kPrimFloat:
2109 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002110 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>(), XMM0);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002111 break;
2112
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002113 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002114 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002115 }
2116 }
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002117 codegen_->GenerateFrameExit();
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002118}
2119
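// Illustrative sketch (not part of the code generator): the return-value
// convention enforced by the two VisitReturn methods above, collected into one
// hypothetical helper. The helper name is invented; the Location/Primitive
// calls reuse APIs already used in this file.
static Location IllustrateX86ReturnLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      return Location::RegisterLocation(EAX);           // 32-bit values and references in EAX.
    case Primitive::kPrimLong:
      return Location::RegisterPairLocation(EAX, EDX);  // Longs split across EAX (low) / EDX (high).
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      return Location::FpuRegisterLocation(XMM0);       // Floating-point values in XMM0.
    default:
      LOG(FATAL) << "Unexpected return type " << type;
  }
  return Location::NoLocation();  // Not reached; keeps the sketch self-contained.
}
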
Calin Juravle175dc732015-08-25 15:42:32 +01002120void LocationsBuilderX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2121 // The trampoline uses the same calling convention as a normal dex call,
2122 // except that instead of loading arg0/r0 with the target Method*, arg0/r0
2123 // will contain the method_idx.
2124 HandleInvoke(invoke);
2125}
2126
2127void InstructionCodeGeneratorX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2128 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2129}
2130
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002131void LocationsBuilderX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002132 // Explicit clinit checks triggered by static invokes must have been pruned by
2133 // art::PrepareForRegisterAllocation.
2134 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002135
Mark Mendellfb8d2792015-03-31 22:16:59 -04002136 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002137 if (intrinsic.TryDispatch(invoke)) {
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002138 if (invoke->GetLocations()->CanCall() && invoke->HasPcRelativeDexCache()) {
Vladimir Markoc53c0792015-11-19 15:48:33 +00002139 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002140 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04002141 return;
2142 }
2143
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002144 HandleInvoke(invoke);
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002145
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002146 // For the PC-relative dex cache, the invoke has an extra input: the PC-relative address base.
2147 if (invoke->HasPcRelativeDexCache()) {
Vladimir Markob4536b72015-11-24 13:45:23 +00002148 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002149 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002150}
2151
Mark Mendell09ed1a32015-03-25 08:30:06 -04002152static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86* codegen) {
2153 if (invoke->GetLocations()->Intrinsified()) {
2154 IntrinsicCodeGeneratorX86 intrinsic(codegen);
2155 intrinsic.Dispatch(invoke);
2156 return true;
2157 }
2158 return false;
2159}
2160
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002161void InstructionCodeGeneratorX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002162 // Explicit clinit checks triggered by static invokes must have been pruned by
2163 // art::PrepareForRegisterAllocation.
2164 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002165
Mark Mendell09ed1a32015-03-25 08:30:06 -04002166 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2167 return;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002168 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002169
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002170 LocationSummary* locations = invoke->GetLocations();
Mark Mendell09ed1a32015-03-25 08:30:06 -04002171 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002172 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Mingyao Yang8693fe12015-04-17 16:51:08 -07002173 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002174}
2175
2176void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00002177 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
2178 if (intrinsic.TryDispatch(invoke)) {
2179 return;
2180 }
2181
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002182 HandleInvoke(invoke);
2183}
2184
2185void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002186 InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002187 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002188}
2189
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002190void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002191 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2192 return;
2193 }
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002194
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002195 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002196 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002197 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002198}
2199
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002200void LocationsBuilderX86::VisitInvokeInterface(HInvokeInterface* invoke) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002201 // This call to HandleInvoke allocates a temporary (core) register
2202 // which is also used to transfer the hidden argument from a core
2203 // register to the FP register (XMM7).
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002204 HandleInvoke(invoke);
2205 // Add the hidden argument.
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002206 invoke->GetLocations()->AddTemp(Location::FpuRegisterLocation(XMM7));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002207}
2208
2209void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) {
2210 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002211 LocationSummary* locations = invoke->GetLocations();
2212 Register temp = locations->GetTemp(0).AsRegister<Register>();
2213 XmmRegister hidden_reg = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002214 Location receiver = locations->InAt(0);
2215 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2216
Roland Levillain0d5a2812015-11-13 10:07:31 +00002217 // Set the hidden argument. It is safe to do this here, as XMM7
2218 // won't be modified thereafter, before the `call` instruction.
2219 DCHECK_EQ(XMM7, hidden_reg);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002220 __ movl(temp, Immediate(invoke->GetDexMethodIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002221 __ movd(hidden_reg, temp);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002222
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002223 if (receiver.IsStackSlot()) {
2224 __ movl(temp, Address(ESP, receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002225 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002226 __ movl(temp, Address(temp, class_offset));
2227 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002228 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002229 __ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002230 }
Roland Levillain4d027112015-07-01 15:41:14 +01002231 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002232 // Instead of simply (possibly) unpoisoning `temp` here, we should
2233 // emit a read barrier for the previous class reference load.
2234 // However, this is not required in practice, as this is an
2235 // intermediate/temporary reference and because the current
2236 // concurrent copying collector keeps the from-space memory
2237 // intact/accessible until the end of the marking phase (future
2238 // collectors may not).
Roland Levillain4d027112015-07-01 15:41:14 +01002239 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002240 // temp = temp->GetAddressOfIMT()
2241 __ movl(temp,
2242 Address(temp, mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002243 // temp = temp->GetImtEntryAt(method_offset);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002244 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00002245 invoke->GetImtIndex(), kX86PointerSize));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002246 __ movl(temp, Address(temp, method_offset));
2247 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002248 __ call(Address(temp,
Andreas Gampe542451c2016-07-26 09:02:02 -07002249 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002250
2251 DCHECK(!codegen_->IsLeafMethod());
2252 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2253}
2254
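// Recap of the interface dispatch emitted above, as hedged pseudocode (field
// names are descriptive only, not the exact mirror:: member names):
//
//   temp = receiver->klass_                            // class_offset load (+ implicit null check)
//   temp = temp->imt_                                  // mirror::Class::ImtPtrOffset
//   temp = temp->entries_[invoke->GetImtIndex()]       // ImTable::OffsetOfElement
//   call temp->entry_point_from_quick_compiled_code    // ArtMethod entry point
//
// with the interface method index passed as a hidden argument in XMM7, so the
// IMT conflict trampoline can resolve slot collisions.
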
Orion Hodsonac141392017-01-13 11:53:47 +00002255void LocationsBuilderX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2256 HandleInvoke(invoke);
2257}
2258
2259void InstructionCodeGeneratorX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2260 codegen_->GenerateInvokePolymorphicCall(invoke);
2261}
2262
Roland Levillain88cb1752014-10-20 16:36:47 +01002263void LocationsBuilderX86::VisitNeg(HNeg* neg) {
2264 LocationSummary* locations =
2265 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2266 switch (neg->GetResultType()) {
2267 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002268 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002269 locations->SetInAt(0, Location::RequiresRegister());
2270 locations->SetOut(Location::SameAsFirstInput());
2271 break;
2272
Roland Levillain88cb1752014-10-20 16:36:47 +01002273 case Primitive::kPrimFloat:
Roland Levillain5368c212014-11-27 15:03:41 +00002274 locations->SetInAt(0, Location::RequiresFpuRegister());
2275 locations->SetOut(Location::SameAsFirstInput());
2276 locations->AddTemp(Location::RequiresRegister());
2277 locations->AddTemp(Location::RequiresFpuRegister());
2278 break;
2279
Roland Levillain88cb1752014-10-20 16:36:47 +01002280 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002281 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002282 locations->SetOut(Location::SameAsFirstInput());
2283 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002284 break;
2285
2286 default:
2287 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2288 }
2289}
2290
2291void InstructionCodeGeneratorX86::VisitNeg(HNeg* neg) {
2292 LocationSummary* locations = neg->GetLocations();
2293 Location out = locations->Out();
2294 Location in = locations->InAt(0);
2295 switch (neg->GetResultType()) {
2296 case Primitive::kPrimInt:
2297 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002298 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002299 __ negl(out.AsRegister<Register>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002300 break;
2301
2302 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002303 DCHECK(in.IsRegisterPair());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002304 DCHECK(in.Equals(out));
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002305 __ negl(out.AsRegisterPairLow<Register>());
2306 // Negation is similar to subtraction from zero. The least
2307 // significant 32-bit half triggers a borrow when it is different
2308 // from zero; to take it into account, add 1 to the most significant
2309 // half if the carry flag (CF) is set to 1 after the first NEGL
2310 // operation.
2311 __ adcl(out.AsRegisterPairHigh<Register>(), Immediate(0));
2312 __ negl(out.AsRegisterPairHigh<Register>());
2313 break;
2314
Roland Levillain5368c212014-11-27 15:03:41 +00002315 case Primitive::kPrimFloat: {
2316 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002317 Register constant = locations->GetTemp(0).AsRegister<Register>();
2318 XmmRegister mask = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002319 // Implement float negation with an exclusive or with value
2320 // 0x80000000 (mask for bit 31, representing the sign of a
2321 // single-precision floating-point number).
2322 __ movl(constant, Immediate(INT32_C(0x80000000)));
2323 __ movd(mask, constant);
Roland Levillain271ab9c2014-11-27 15:23:57 +00002324 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002325 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002326 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002327
Roland Levillain5368c212014-11-27 15:03:41 +00002328 case Primitive::kPrimDouble: {
2329 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002330 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002331 // Implement double negation with an exclusive or with value
2332 // 0x8000000000000000 (mask for bit 63, representing the sign of
2333 // a double-precision floating-point number).
2334 __ LoadLongConstant(mask, INT64_C(0x8000000000000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002335 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002336 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002337 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002338
2339 default:
2340 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2341 }
2342}
2343
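// Portable sketch of the negl/adcl/negl sequence used for long negation above
// (illustration only; the helper name is invented and uses plain unsigned
// arithmetic so it can be checked in isolation).
static inline void IllustrateNegateLongViaHalves(uint32_t* lo, uint32_t* hi) {
  uint32_t old_lo = *lo;
  *lo = 0u - old_lo;                          // negl low: low = -low, CF = (old low != 0).
  uint32_t carry = (old_lo != 0u) ? 1u : 0u;
  *hi += carry;                               // adcl high, 0: fold in the borrow from the low half.
  *hi = 0u - *hi;                             // negl high.
}
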
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002344void LocationsBuilderX86::VisitX86FPNeg(HX86FPNeg* neg) {
2345 LocationSummary* locations =
2346 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2347 DCHECK(Primitive::IsFloatingPointType(neg->GetType()));
2348 locations->SetInAt(0, Location::RequiresFpuRegister());
2349 locations->SetInAt(1, Location::RequiresRegister());
2350 locations->SetOut(Location::SameAsFirstInput());
2351 locations->AddTemp(Location::RequiresFpuRegister());
2352}
2353
2354void InstructionCodeGeneratorX86::VisitX86FPNeg(HX86FPNeg* neg) {
2355 LocationSummary* locations = neg->GetLocations();
2356 Location out = locations->Out();
2357 DCHECK(locations->InAt(0).Equals(out));
2358
2359 Register constant_area = locations->InAt(1).AsRegister<Register>();
2360 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
2361 if (neg->GetType() == Primitive::kPrimFloat) {
2362 __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x80000000), constant_area));
2363 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
2364 } else {
2365 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000), constant_area));
2366 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
2367 }
2368}
2369
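// Portable sketch of the xorps/xorpd sign-bit flip used by VisitNeg and
// VisitX86FPNeg above (illustration only; assumes memcpy/UINT32_C are
// available, as elsewhere in the codebase).
static inline float IllustrateNegateFloatViaSignBit(float value) {
  uint32_t bits;
  memcpy(&bits, &value, sizeof(bits));
  bits ^= UINT32_C(0x80000000);  // Flip bit 31, the IEEE-754 sign bit.
  memcpy(&value, &bits, sizeof(value));
  return value;                  // Also flips the sign of NaN and +/-0.0, unlike `0.0f - value`.
}
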
Roland Levillaindff1f282014-11-05 14:15:05 +00002370void LocationsBuilderX86::VisitTypeConversion(HTypeConversion* conversion) {
Roland Levillaindff1f282014-11-05 14:15:05 +00002371 Primitive::Type result_type = conversion->GetResultType();
2372 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002373 DCHECK_NE(result_type, input_type);
Roland Levillain624279f2014-12-04 11:54:28 +00002374
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002375 // The float-to-long and double-to-long type conversions rely on a
2376 // call to the runtime.
Roland Levillain624279f2014-12-04 11:54:28 +00002377 LocationSummary::CallKind call_kind =
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002378 ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
2379 && result_type == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01002380 ? LocationSummary::kCallOnMainOnly
Roland Levillain624279f2014-12-04 11:54:28 +00002381 : LocationSummary::kNoCall;
2382 LocationSummary* locations =
2383 new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
2384
David Brazdilb2bd1c52015-03-25 11:17:37 +00002385 // The Java language does not allow treating boolean as an integral type but
2386 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002387
Roland Levillaindff1f282014-11-05 14:15:05 +00002388 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002389 case Primitive::kPrimByte:
2390 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002391 case Primitive::kPrimLong: {
2392 // Type conversion from long to byte is a result of code transformations.
2393 HInstruction* input = conversion->InputAt(0);
2394 Location input_location = input->IsConstant()
2395 ? Location::ConstantLocation(input->AsConstant())
2396 : Location::RegisterPairLocation(EAX, EDX);
2397 locations->SetInAt(0, input_location);
2398 // Make the output overlap to please the register allocator. This greatly simplifies
2399 // the validation of the linear scan implementation.
2400 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2401 break;
2402 }
David Brazdil46e2a392015-03-16 17:31:52 +00002403 case Primitive::kPrimBoolean:
2404 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002405 case Primitive::kPrimShort:
2406 case Primitive::kPrimInt:
2407 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002408 // Processing a Dex `int-to-byte' instruction.
Mark Mendell5f874182015-03-04 15:42:45 -05002409 locations->SetInAt(0, Location::ByteRegisterOrConstant(ECX, conversion->InputAt(0)));
2410 // Make the output overlap to please the register allocator. This greatly simplifies
2411 // the validation of the linear scan implementation.
2412 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Roland Levillain51d3fc42014-11-13 14:11:42 +00002413 break;
2414
2415 default:
2416 LOG(FATAL) << "Unexpected type conversion from " << input_type
2417 << " to " << result_type;
2418 }
2419 break;
2420
Roland Levillain01a8d712014-11-14 16:27:39 +00002421 case Primitive::kPrimShort:
2422 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002423 case Primitive::kPrimLong:
2424 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002425 case Primitive::kPrimBoolean:
2426 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002427 case Primitive::kPrimByte:
2428 case Primitive::kPrimInt:
2429 case Primitive::kPrimChar:
2430 // Processing a Dex `int-to-short' instruction.
2431 locations->SetInAt(0, Location::Any());
2432 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2433 break;
2434
2435 default:
2436 LOG(FATAL) << "Unexpected type conversion from " << input_type
2437 << " to " << result_type;
2438 }
2439 break;
2440
Roland Levillain946e1432014-11-11 17:35:19 +00002441 case Primitive::kPrimInt:
2442 switch (input_type) {
2443 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002444 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002445 locations->SetInAt(0, Location::Any());
2446 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2447 break;
2448
2449 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002450 // Processing a Dex `float-to-int' instruction.
2451 locations->SetInAt(0, Location::RequiresFpuRegister());
2452 locations->SetOut(Location::RequiresRegister());
2453 locations->AddTemp(Location::RequiresFpuRegister());
2454 break;
2455
Roland Levillain946e1432014-11-11 17:35:19 +00002456 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002457 // Processing a Dex `double-to-int' instruction.
2458 locations->SetInAt(0, Location::RequiresFpuRegister());
2459 locations->SetOut(Location::RequiresRegister());
2460 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002461 break;
2462
2463 default:
2464 LOG(FATAL) << "Unexpected type conversion from " << input_type
2465 << " to " << result_type;
2466 }
2467 break;
2468
Roland Levillaindff1f282014-11-05 14:15:05 +00002469 case Primitive::kPrimLong:
2470 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002471 case Primitive::kPrimBoolean:
2472 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002473 case Primitive::kPrimByte:
2474 case Primitive::kPrimShort:
2475 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002476 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002477 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002478 locations->SetInAt(0, Location::RegisterLocation(EAX));
2479 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
2480 break;
2481
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002482 case Primitive::kPrimFloat:
Vladimir Marko949c91f2015-01-27 10:48:44 +00002483 case Primitive::kPrimDouble: {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002484 // Processing a Dex `float-to-long' or 'double-to-long' instruction.
Vladimir Marko949c91f2015-01-27 10:48:44 +00002485 InvokeRuntimeCallingConvention calling_convention;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002486 XmmRegister parameter = calling_convention.GetFpuRegisterAt(0);
2487 locations->SetInAt(0, Location::FpuRegisterLocation(parameter));
2488
Vladimir Marko949c91f2015-01-27 10:48:44 +00002489 // The runtime helper puts the result in EAX, EDX.
2490 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
Vladimir Marko949c91f2015-01-27 10:48:44 +00002491 }
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002492 break;
Roland Levillaindff1f282014-11-05 14:15:05 +00002493
2494 default:
2495 LOG(FATAL) << "Unexpected type conversion from " << input_type
2496 << " to " << result_type;
2497 }
2498 break;
2499
Roland Levillain981e4542014-11-14 11:47:14 +00002500 case Primitive::kPrimChar:
2501 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002502 case Primitive::kPrimLong:
2503 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002504 case Primitive::kPrimBoolean:
2505 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002506 case Primitive::kPrimByte:
2507 case Primitive::kPrimShort:
2508 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002509 // Processing a Dex `int-to-char' instruction.
2510 locations->SetInAt(0, Location::Any());
2511 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2512 break;
2513
2514 default:
2515 LOG(FATAL) << "Unexpected type conversion from " << input_type
2516 << " to " << result_type;
2517 }
2518 break;
2519
Roland Levillaindff1f282014-11-05 14:15:05 +00002520 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002521 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002522 case Primitive::kPrimBoolean:
2523 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002524 case Primitive::kPrimByte:
2525 case Primitive::kPrimShort:
2526 case Primitive::kPrimInt:
2527 case Primitive::kPrimChar:
2528 // Processing a Dex `int-to-float' instruction.
2529 locations->SetInAt(0, Location::RequiresRegister());
2530 locations->SetOut(Location::RequiresFpuRegister());
2531 break;
2532
2533 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002534 // Processing a Dex `long-to-float' instruction.
Roland Levillain232ade02015-04-20 15:14:36 +01002535 locations->SetInAt(0, Location::Any());
2536 locations->SetOut(Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002537 break;
2538
Roland Levillaincff13742014-11-17 14:32:17 +00002539 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002540 // Processing a Dex `double-to-float' instruction.
2541 locations->SetInAt(0, Location::RequiresFpuRegister());
2542 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002543 break;
2544
2545 default:
2546 LOG(FATAL) << "Unexpected type conversion from " << input_type
2547 << " to " << result_type;
2548 }
2549 break;
2550
Roland Levillaindff1f282014-11-05 14:15:05 +00002551 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002552 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002553 case Primitive::kPrimBoolean:
2554 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002555 case Primitive::kPrimByte:
2556 case Primitive::kPrimShort:
2557 case Primitive::kPrimInt:
2558 case Primitive::kPrimChar:
2559 // Processing a Dex `int-to-double' instruction.
2560 locations->SetInAt(0, Location::RequiresRegister());
2561 locations->SetOut(Location::RequiresFpuRegister());
2562 break;
2563
2564 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002565 // Processing a Dex `long-to-double' instruction.
Roland Levillain232ade02015-04-20 15:14:36 +01002566 locations->SetInAt(0, Location::Any());
2567 locations->SetOut(Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002568 break;
2569
Roland Levillaincff13742014-11-17 14:32:17 +00002570 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002571 // Processing a Dex `float-to-double' instruction.
2572 locations->SetInAt(0, Location::RequiresFpuRegister());
2573 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002574 break;
2575
2576 default:
2577 LOG(FATAL) << "Unexpected type conversion from " << input_type
2578 << " to " << result_type;
2579 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002580 break;
2581
2582 default:
2583 LOG(FATAL) << "Unexpected type conversion from " << input_type
2584 << " to " << result_type;
2585 }
2586}
2587
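// For reference, the integral narrowing conversions prepared above (and emitted
// by the VisitTypeConversion below) are plain truncate-then-extend operations.
// A portable sketch (illustration only; the helper names are invented and rely
// on two's-complement int, which holds on all ART targets):
static inline int32_t IllustrateIntToByte(int32_t v)  { return static_cast<int8_t>(v); }    // movsxb
static inline int32_t IllustrateIntToShort(int32_t v) { return static_cast<int16_t>(v); }   // movsxw
static inline int32_t IllustrateIntToChar(int32_t v)  { return static_cast<uint16_t>(v); }  // movzxw
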
2588void InstructionCodeGeneratorX86::VisitTypeConversion(HTypeConversion* conversion) {
2589 LocationSummary* locations = conversion->GetLocations();
2590 Location out = locations->Out();
2591 Location in = locations->InAt(0);
2592 Primitive::Type result_type = conversion->GetResultType();
2593 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002594 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002595 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002596 case Primitive::kPrimByte:
2597 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002598 case Primitive::kPrimLong:
2599 // Type conversion from long to byte is a result of code transformations.
2600 if (in.IsRegisterPair()) {
2601 __ movsxb(out.AsRegister<Register>(), in.AsRegisterPairLow<ByteRegister>());
2602 } else {
2603 DCHECK(in.GetConstant()->IsLongConstant());
2604 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2605 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2606 }
2607 break;
David Brazdil46e2a392015-03-16 17:31:52 +00002608 case Primitive::kPrimBoolean:
2609 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002610 case Primitive::kPrimShort:
2611 case Primitive::kPrimInt:
2612 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002613 // Processing a Dex `int-to-byte' instruction.
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002614 if (in.IsRegister()) {
2615 __ movsxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002616 } else {
2617 DCHECK(in.GetConstant()->IsIntConstant());
2618 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
2619 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2620 }
Roland Levillain51d3fc42014-11-13 14:11:42 +00002621 break;
2622
2623 default:
2624 LOG(FATAL) << "Unexpected type conversion from " << input_type
2625 << " to " << result_type;
2626 }
2627 break;
2628
Roland Levillain01a8d712014-11-14 16:27:39 +00002629 case Primitive::kPrimShort:
2630 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002631 case Primitive::kPrimLong:
2632 // Type conversion from long to short is a result of code transformations.
2633 if (in.IsRegisterPair()) {
2634 __ movsxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2635 } else if (in.IsDoubleStackSlot()) {
2636 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2637 } else {
2638 DCHECK(in.GetConstant()->IsLongConstant());
2639 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2640 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
2641 }
2642 break;
David Brazdil46e2a392015-03-16 17:31:52 +00002643 case Primitive::kPrimBoolean:
2644 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002645 case Primitive::kPrimByte:
2646 case Primitive::kPrimInt:
2647 case Primitive::kPrimChar:
2648 // Processing a Dex `int-to-short' instruction.
2649 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002650 __ movsxw(out.AsRegister<Register>(), in.AsRegister<Register>());
Roland Levillain01a8d712014-11-14 16:27:39 +00002651 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002652 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain01a8d712014-11-14 16:27:39 +00002653 } else {
2654 DCHECK(in.GetConstant()->IsIntConstant());
2655 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002656 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
Roland Levillain01a8d712014-11-14 16:27:39 +00002657 }
2658 break;
2659
2660 default:
2661 LOG(FATAL) << "Unexpected type conversion from " << input_type
2662 << " to " << result_type;
2663 }
2664 break;
2665
Roland Levillain946e1432014-11-11 17:35:19 +00002666 case Primitive::kPrimInt:
2667 switch (input_type) {
2668 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002669 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002670 if (in.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002671 __ movl(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
Roland Levillain946e1432014-11-11 17:35:19 +00002672 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002673 __ movl(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain946e1432014-11-11 17:35:19 +00002674 } else {
2675 DCHECK(in.IsConstant());
2676 DCHECK(in.GetConstant()->IsLongConstant());
2677 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002678 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002679 }
2680 break;
2681
Roland Levillain3f8f9362014-12-02 17:45:01 +00002682 case Primitive::kPrimFloat: {
2683 // Processing a Dex `float-to-int' instruction.
2684 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2685 Register output = out.AsRegister<Register>();
2686 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002687 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002688
2689 __ movl(output, Immediate(kPrimIntMax));
2690 // temp = int-to-float(output)
2691 __ cvtsi2ss(temp, output);
2692 // if input >= temp goto done
2693 __ comiss(input, temp);
2694 __ j(kAboveEqual, &done);
2695 // if input == NaN goto nan
2696 __ j(kUnordered, &nan);
2697 // output = float-to-int-truncate(input)
2698 __ cvttss2si(output, input);
2699 __ jmp(&done);
2700 __ Bind(&nan);
2701 // output = 0
2702 __ xorl(output, output);
2703 __ Bind(&done);
2704 break;
2705 }
2706
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002707 case Primitive::kPrimDouble: {
2708 // Processing a Dex `double-to-int' instruction.
2709 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2710 Register output = out.AsRegister<Register>();
2711 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002712 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002713
2714 __ movl(output, Immediate(kPrimIntMax));
2715 // temp = int-to-double(output)
2716 __ cvtsi2sd(temp, output);
2717 // if input >= temp goto done
2718 __ comisd(input, temp);
2719 __ j(kAboveEqual, &done);
2720 // if input == NaN goto nan
2721 __ j(kUnordered, &nan);
2722 // output = double-to-int-truncate(input)
2723 __ cvttsd2si(output, input);
2724 __ jmp(&done);
2725 __ Bind(&nan);
2726 // output = 0
2727 __ xorl(output, output);
2728 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002729 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002730 }
Roland Levillain946e1432014-11-11 17:35:19 +00002731
2732 default:
2733 LOG(FATAL) << "Unexpected type conversion from " << input_type
2734 << " to " << result_type;
2735 }
2736 break;
2737
Roland Levillaindff1f282014-11-05 14:15:05 +00002738 case Primitive::kPrimLong:
2739 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002740 case Primitive::kPrimBoolean:
2741 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002742 case Primitive::kPrimByte:
2743 case Primitive::kPrimShort:
2744 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002745 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002746 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002747 DCHECK_EQ(out.AsRegisterPairLow<Register>(), EAX);
2748 DCHECK_EQ(out.AsRegisterPairHigh<Register>(), EDX);
Roland Levillain271ab9c2014-11-27 15:23:57 +00002749 DCHECK_EQ(in.AsRegister<Register>(), EAX);
Roland Levillaindff1f282014-11-05 14:15:05 +00002750 __ cdq();
2751 break;
2752
2753 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002754 // Processing a Dex `float-to-long' instruction.
Serban Constantinescuba45db02016-07-12 22:53:02 +01002755 codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00002756 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
Roland Levillain624279f2014-12-04 11:54:28 +00002757 break;
2758
Roland Levillaindff1f282014-11-05 14:15:05 +00002759 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002760 // Processing a Dex `double-to-long' instruction.
Serban Constantinescuba45db02016-07-12 22:53:02 +01002761 codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00002762 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
Roland Levillaindff1f282014-11-05 14:15:05 +00002763 break;
2764
2765 default:
2766 LOG(FATAL) << "Unexpected type conversion from " << input_type
2767 << " to " << result_type;
2768 }
2769 break;
2770
Roland Levillain981e4542014-11-14 11:47:14 +00002771 case Primitive::kPrimChar:
2772 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002773 case Primitive::kPrimLong:
2774 // Type conversion from long to char is a result of code transformations.
2775 if (in.IsRegisterPair()) {
2776 __ movzxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2777 } else if (in.IsDoubleStackSlot()) {
2778 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2779 } else {
2780 DCHECK(in.GetConstant()->IsLongConstant());
2781 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2782 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
2783 }
2784 break;
David Brazdil46e2a392015-03-16 17:31:52 +00002785 case Primitive::kPrimBoolean:
2786 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002787 case Primitive::kPrimByte:
2788 case Primitive::kPrimShort:
2789 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002790 // Processing a Dex `int-to-char' instruction.
2791 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002792 __ movzxw(out.AsRegister<Register>(), in.AsRegister<Register>());
Roland Levillain981e4542014-11-14 11:47:14 +00002793 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002794 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain981e4542014-11-14 11:47:14 +00002795 } else {
2796 DCHECK(in.GetConstant()->IsIntConstant());
2797 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002798 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
Roland Levillain981e4542014-11-14 11:47:14 +00002799 }
2800 break;
2801
2802 default:
2803 LOG(FATAL) << "Unexpected type conversion from " << input_type
2804 << " to " << result_type;
2805 }
2806 break;
2807
Roland Levillaindff1f282014-11-05 14:15:05 +00002808 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002809 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002810 case Primitive::kPrimBoolean:
2811 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002812 case Primitive::kPrimByte:
2813 case Primitive::kPrimShort:
2814 case Primitive::kPrimInt:
2815 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002816 // Processing a Dex `int-to-float' instruction.
Roland Levillain271ab9c2014-11-27 15:23:57 +00002817 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00002818 break;
2819
Roland Levillain6d0e4832014-11-27 18:31:21 +00002820 case Primitive::kPrimLong: {
2821 // Processing a Dex `long-to-float' instruction.
Roland Levillain232ade02015-04-20 15:14:36 +01002822 size_t adjustment = 0;
Roland Levillain6d0e4832014-11-27 18:31:21 +00002823
Roland Levillain232ade02015-04-20 15:14:36 +01002824 // Create stack space for the call to
2825 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstps below.
2826 // TODO: enhance register allocator to ask for stack temporaries.
2827 if (!in.IsDoubleStackSlot() || !out.IsStackSlot()) {
2828 adjustment = Primitive::ComponentSize(Primitive::kPrimLong);
2829 __ subl(ESP, Immediate(adjustment));
2830 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002831
Roland Levillain232ade02015-04-20 15:14:36 +01002832 // Load the value to the FP stack, using temporaries if needed.
2833 PushOntoFPStack(in, 0, adjustment, false, true);
2834
2835 if (out.IsStackSlot()) {
2836 __ fstps(Address(ESP, out.GetStackIndex() + adjustment));
2837 } else {
2838 __ fstps(Address(ESP, 0));
2839 Location stack_temp = Location::StackSlot(0);
2840 codegen_->Move32(out, stack_temp);
2841 }
2842
2843 // Remove the temporary stack space we allocated.
2844 if (adjustment != 0) {
2845 __ addl(ESP, Immediate(adjustment));
2846 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002847 break;
2848 }
2849
Roland Levillaincff13742014-11-17 14:32:17 +00002850 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002851 // Processing a Dex `double-to-float' instruction.
2852 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00002853 break;
2854
2855 default:
2856 LOG(FATAL) << "Unexpected type conversion from " << input_type
2857 << " to " << result_type;
2858 }
2859 break;
2860
Roland Levillaindff1f282014-11-05 14:15:05 +00002861 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002862 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002863 case Primitive::kPrimBoolean:
2864 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002865 case Primitive::kPrimByte:
2866 case Primitive::kPrimShort:
2867 case Primitive::kPrimInt:
2868 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002869 // Processing a Dex `int-to-double' instruction.
Roland Levillain271ab9c2014-11-27 15:23:57 +00002870 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00002871 break;
2872
Roland Levillain647b9ed2014-11-27 12:06:00 +00002873 case Primitive::kPrimLong: {
2874 // Processing a Dex `long-to-double' instruction.
Roland Levillain232ade02015-04-20 15:14:36 +01002875 size_t adjustment = 0;
Roland Levillain647b9ed2014-11-27 12:06:00 +00002876
Roland Levillain232ade02015-04-20 15:14:36 +01002877 // Create stack space for the call to
2878 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstpl below.
2879 // TODO: enhance register allocator to ask for stack temporaries.
2880 if (!in.IsDoubleStackSlot() || !out.IsDoubleStackSlot()) {
2881 adjustment = Primitive::ComponentSize(Primitive::kPrimLong);
2882 __ subl(ESP, Immediate(adjustment));
2883 }
2884
2885 // Load the value to the FP stack, using temporaries if needed.
2886 PushOntoFPStack(in, 0, adjustment, false, true);
2887
2888 if (out.IsDoubleStackSlot()) {
2889 __ fstpl(Address(ESP, out.GetStackIndex() + adjustment));
2890 } else {
2891 __ fstpl(Address(ESP, 0));
2892 Location stack_temp = Location::DoubleStackSlot(0);
2893 codegen_->Move64(out, stack_temp);
2894 }
2895
2896 // Remove the temporary stack space we allocated.
2897 if (adjustment != 0) {
2898 __ addl(ESP, Immediate(adjustment));
2899 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002900 break;
2901 }
2902
Roland Levillaincff13742014-11-17 14:32:17 +00002903 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002904 // Processing a Dex `float-to-double' instruction.
2905 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00002906 break;
2907
2908 default:
2909 LOG(FATAL) << "Unexpected type conversion from " << input_type
2910 << " to " << result_type;
2911 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002912 break;
2913
2914 default:
2915 LOG(FATAL) << "Unexpected type conversion from " << input_type
2916 << " to " << result_type;
2917 }
2918}
2919
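// Portable sketch of the float/double-to-int sequences emitted above
// (illustration only, not the generated code itself). The generated code needs
// no explicit negative-side test because cvttss2si/cvttsd2si return 0x80000000
// (INT32_MIN) on overflow, which is already the saturated value.
static inline int32_t IllustrateFpToInt(double input) {
  if (input != input) {
    return 0;                                      // NaN converts to 0.
  }
  if (input >= static_cast<double>(kPrimIntMax)) {
    return kPrimIntMax;                            // Saturate at Integer.MAX_VALUE.
  }
  if (input <= static_cast<double>(INT32_MIN)) {
    return INT32_MIN;                              // Saturate at Integer.MIN_VALUE.
  }
  return static_cast<int32_t>(input);              // In range: truncate toward zero.
}
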
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002920void LocationsBuilderX86::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002921 LocationSummary* locations =
2922 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002923 switch (add->GetResultType()) {
Mark Mendell09b84632015-02-13 17:48:38 -05002924 case Primitive::kPrimInt: {
2925 locations->SetInAt(0, Location::RequiresRegister());
2926 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2927 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2928 break;
2929 }
2930
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002931 case Primitive::kPrimLong: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002932 locations->SetInAt(0, Location::RequiresRegister());
2933 locations->SetInAt(1, Location::Any());
2934 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002935 break;
2936 }
2937
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002938 case Primitive::kPrimFloat:
2939 case Primitive::kPrimDouble: {
2940 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00002941 if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
2942 DCHECK(add->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00002943 } else if (add->InputAt(1)->IsConstant()) {
2944 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00002945 } else {
2946 locations->SetInAt(1, Location::Any());
2947 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002948 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002949 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002950 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002951
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002952 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002953 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
2954 break;
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002955 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002956}
2957
2958void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
2959 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002960 Location first = locations->InAt(0);
2961 Location second = locations->InAt(1);
Mark Mendell09b84632015-02-13 17:48:38 -05002962 Location out = locations->Out();
2963
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002964 switch (add->GetResultType()) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002965 case Primitive::kPrimInt: {
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002966 if (second.IsRegister()) {
Mark Mendell09b84632015-02-13 17:48:38 -05002967 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2968 __ addl(out.AsRegister<Register>(), second.AsRegister<Register>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002969 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2970 __ addl(out.AsRegister<Register>(), first.AsRegister<Register>());
Mark Mendell09b84632015-02-13 17:48:38 -05002971 } else {
2972 __ leal(out.AsRegister<Register>(), Address(
2973 first.AsRegister<Register>(), second.AsRegister<Register>(), TIMES_1, 0));
2974 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002975 } else if (second.IsConstant()) {
Mark Mendell09b84632015-02-13 17:48:38 -05002976 int32_t value = second.GetConstant()->AsIntConstant()->GetValue();
2977 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2978 __ addl(out.AsRegister<Register>(), Immediate(value));
2979 } else {
2980 __ leal(out.AsRegister<Register>(), Address(first.AsRegister<Register>(), value));
2981 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002982 } else {
Mark Mendell09b84632015-02-13 17:48:38 -05002983 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002984 __ addl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002985 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002986 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002987 }
2988
2989 case Primitive::kPrimLong: {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00002990 if (second.IsRegisterPair()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002991 __ addl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
2992 __ adcl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00002993 } else if (second.IsDoubleStackSlot()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002994 __ addl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
2995 __ adcl(first.AsRegisterPairHigh<Register>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002996 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00002997 } else {
2998 DCHECK(second.IsConstant()) << second;
2999 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3000 __ addl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
3001 __ adcl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003002 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003003 break;
3004 }
3005
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003006 case Primitive::kPrimFloat: {
3007 if (second.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003008 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
Mark Mendell0616ae02015-04-17 12:49:27 -04003009 } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
3010 HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003011 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003012 __ addss(first.AsFpuRegister<XmmRegister>(),
3013 codegen_->LiteralFloatAddress(
3014 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3015 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3016 } else {
3017 DCHECK(second.IsStackSlot());
3018 __ addss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003019 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003020 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003021 }
3022
3023 case Primitive::kPrimDouble: {
3024 if (second.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003025 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
Mark Mendell0616ae02015-04-17 12:49:27 -04003026 } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
3027 HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003028 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003029 __ addsd(first.AsFpuRegister<XmmRegister>(),
3030 codegen_->LiteralDoubleAddress(
3031 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3032 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3033 } else {
3034 DCHECK(second.IsDoubleStackSlot());
3035 __ addsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003036 }
3037 break;
3038 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003039
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003040 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003041 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003042 }
3043}
3044
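// Portable sketch of the addl/adcl pair used for long addition above: add the
// low halves first, then add the high halves plus the carry out of the low
// half (illustration only; helper name invented). The int case above also
// shows the leal trick: when the output register differs from both inputs,
// `leal out, [first + second]` acts as a non-destructive three-operand add.
static inline void IllustrateAddLongViaHalves(uint32_t* lo, uint32_t* hi,
                                              uint32_t rhs_lo, uint32_t rhs_hi) {
  uint32_t old_lo = *lo;
  *lo = old_lo + rhs_lo;                        // addl low, rhs_low.
  uint32_t carry = (*lo < old_lo) ? 1u : 0u;    // Carry flag (CF) from the low addition.
  *hi = *hi + rhs_hi + carry;                   // adcl high, rhs_high.
}
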
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003045void LocationsBuilderX86::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003046 LocationSummary* locations =
3047 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003048 switch (sub->GetResultType()) {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003049 case Primitive::kPrimInt:
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003050 case Primitive::kPrimLong: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003051 locations->SetInAt(0, Location::RequiresRegister());
3052 locations->SetInAt(1, Location::Any());
3053 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003054 break;
3055 }
Calin Juravle11351682014-10-23 15:38:15 +01003056 case Primitive::kPrimFloat:
3057 case Primitive::kPrimDouble: {
3058 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003059 if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3060 DCHECK(sub->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003061 } else if (sub->InputAt(1)->IsConstant()) {
3062 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003063 } else {
3064 locations->SetInAt(1, Location::Any());
3065 }
Calin Juravle11351682014-10-23 15:38:15 +01003066 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003067 break;
Calin Juravle11351682014-10-23 15:38:15 +01003068 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003069
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003070 default:
Calin Juravle11351682014-10-23 15:38:15 +01003071 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003072 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003073}
3074
3075void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
3076 LocationSummary* locations = sub->GetLocations();
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003077 Location first = locations->InAt(0);
3078 Location second = locations->InAt(1);
Calin Juravle11351682014-10-23 15:38:15 +01003079 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003080 switch (sub->GetResultType()) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003081 case Primitive::kPrimInt: {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003082 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003083 __ subl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003084 } else if (second.IsConstant()) {
Roland Levillain199f3362014-11-27 17:15:16 +00003085 __ subl(first.AsRegister<Register>(),
3086 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003087 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003088 __ subl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003089 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003090 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003091 }
3092
3093 case Primitive::kPrimLong: {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00003094 if (second.IsRegisterPair()) {
Calin Juravle11351682014-10-23 15:38:15 +01003095 __ subl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3096 __ sbbl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003097 } else if (second.IsDoubleStackSlot()) {
Calin Juravle11351682014-10-23 15:38:15 +01003098 __ subl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003099 __ sbbl(first.AsRegisterPairHigh<Register>(),
3100 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003101 } else {
3102 DCHECK(second.IsConstant()) << second;
3103 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3104 __ subl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
3105 __ sbbl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003106 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003107 break;
3108 }
3109
Calin Juravle11351682014-10-23 15:38:15 +01003110 case Primitive::kPrimFloat: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003111 if (second.IsFpuRegister()) {
3112 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3113 } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3114 HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003115 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003116 __ subss(first.AsFpuRegister<XmmRegister>(),
3117 codegen_->LiteralFloatAddress(
3118 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3119 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3120 } else {
3121 DCHECK(second.IsStackSlot());
3122 __ subss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3123 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003124 break;
Calin Juravle11351682014-10-23 15:38:15 +01003125 }
3126
3127 case Primitive::kPrimDouble: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003128 if (second.IsFpuRegister()) {
3129 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3130 } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3131 HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003132 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003133 __ subsd(first.AsFpuRegister<XmmRegister>(),
3134 codegen_->LiteralDoubleAddress(
3135 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3136 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3137 } else {
3138 DCHECK(second.IsDoubleStackSlot());
3139 __ subsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3140 }
Calin Juravle11351682014-10-23 15:38:15 +01003141 break;
3142 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003143
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003144 default:
Calin Juravle11351682014-10-23 15:38:15 +01003145 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003146 }
3147}
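
// The kPrimLong case above splits the 64-bit subtraction into a subl on the low words
// followed by an sbbl on the high words, so the borrow out of the low half is folded into
// the high half. A minimal standalone sketch of that decomposition (illustration only, not
// emitted code; the helper name is invented and the fixed-width types are the ones this
// file already uses):
static inline void Sub64Sketch(uint32_t* lo, uint32_t* hi, uint32_t rhs_lo, uint32_t rhs_hi) {
  uint32_t old_lo = *lo;
  *lo -= rhs_lo;                        // subl: low words, producing the borrow.
  *hi -= rhs_hi + (old_lo < rhs_lo);    // sbbl: high words minus that borrow.
}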
3148
Calin Juravle34bacdf2014-10-07 20:23:36 +01003149void LocationsBuilderX86::VisitMul(HMul* mul) {
3150 LocationSummary* locations =
3151 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3152 switch (mul->GetResultType()) {
3153 case Primitive::kPrimInt:
3154 locations->SetInAt(0, Location::RequiresRegister());
3155 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003156 if (mul->InputAt(1)->IsIntConstant()) {
3157 // Can use 3 operand multiply.
3158 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3159 } else {
3160 locations->SetOut(Location::SameAsFirstInput());
3161 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003162 break;
3163 case Primitive::kPrimLong: {
3164 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003165 locations->SetInAt(1, Location::Any());
3166 locations->SetOut(Location::SameAsFirstInput());
3167        // Needed because the one-operand 32-bit multiply produces its 64-bit result in edx:eax.
3168 locations->AddTemp(Location::RegisterLocation(EAX));
3169 locations->AddTemp(Location::RegisterLocation(EDX));
3170 break;
3171 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003172 case Primitive::kPrimFloat:
3173 case Primitive::kPrimDouble: {
3174 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003175 if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3176 DCHECK(mul->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003177 } else if (mul->InputAt(1)->IsConstant()) {
3178 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003179 } else {
3180 locations->SetInAt(1, Location::Any());
3181 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003182 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003183 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003184 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003185
3186 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003187 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003188 }
3189}
3190
3191void InstructionCodeGeneratorX86::VisitMul(HMul* mul) {
3192 LocationSummary* locations = mul->GetLocations();
3193 Location first = locations->InAt(0);
3194 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003195 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003196
3197 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003198 case Primitive::kPrimInt:
3199        // Check the HIR constant explicitly: the constant may have been materialized into a
3200        // register, and in that case the output register need not match the first operand.
3201 if (mul->InputAt(1)->IsIntConstant()) {
3202 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3203 __ imull(out.AsRegister<Register>(), first.AsRegister<Register>(), imm);
3204 } else if (second.IsRegister()) {
3205 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003206 __ imull(first.AsRegister<Register>(), second.AsRegister<Register>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003207 } else {
3208 DCHECK(second.IsStackSlot());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003209 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003210 __ imull(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003211 }
3212 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003213
3214 case Primitive::kPrimLong: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003215 Register in1_hi = first.AsRegisterPairHigh<Register>();
3216 Register in1_lo = first.AsRegisterPairLow<Register>();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003217 Register eax = locations->GetTemp(0).AsRegister<Register>();
3218 Register edx = locations->GetTemp(1).AsRegister<Register>();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003219
3220 DCHECK_EQ(EAX, eax);
3221 DCHECK_EQ(EDX, edx);
3222
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003223 // input: in1 - 64 bits, in2 - 64 bits.
Calin Juravle34bacdf2014-10-07 20:23:36 +01003224 // output: in1
3225 // formula: in1.hi : in1.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
3226 // parts: in1.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
3227 // parts: in1.lo = (in1.lo * in2.lo)[31:0]
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003228 if (second.IsConstant()) {
3229 DCHECK(second.GetConstant()->IsLongConstant());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003230
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003231 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3232 int32_t low_value = Low32Bits(value);
3233 int32_t high_value = High32Bits(value);
3234 Immediate low(low_value);
3235 Immediate high(high_value);
3236
3237 __ movl(eax, high);
3238 // eax <- in1.lo * in2.hi
3239 __ imull(eax, in1_lo);
3240 // in1.hi <- in1.hi * in2.lo
3241 __ imull(in1_hi, low);
3242 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3243 __ addl(in1_hi, eax);
3244              // Move in2_lo to eax to set up the widening 32x32->64 multiply.
3245 __ movl(eax, low);
3246 // edx:eax <- in1.lo * in2.lo
3247 __ mull(in1_lo);
3248 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3249 __ addl(in1_hi, edx);
3250 // in1.lo <- (in1.lo * in2.lo)[31:0];
3251 __ movl(in1_lo, eax);
3252 } else if (second.IsRegisterPair()) {
3253 Register in2_hi = second.AsRegisterPairHigh<Register>();
3254 Register in2_lo = second.AsRegisterPairLow<Register>();
3255
3256 __ movl(eax, in2_hi);
3257 // eax <- in1.lo * in2.hi
3258 __ imull(eax, in1_lo);
3259 // in1.hi <- in1.hi * in2.lo
3260 __ imull(in1_hi, in2_lo);
3261 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3262 __ addl(in1_hi, eax);
3263              // Move in1_lo to eax to set up the widening 32x32->64 multiply.
3264 __ movl(eax, in1_lo);
3265 // edx:eax <- in1.lo * in2.lo
3266 __ mull(in2_lo);
3267 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3268 __ addl(in1_hi, edx);
3269 // in1.lo <- (in1.lo * in2.lo)[31:0];
3270 __ movl(in1_lo, eax);
3271 } else {
3272 DCHECK(second.IsDoubleStackSlot()) << second;
3273 Address in2_hi(ESP, second.GetHighStackIndex(kX86WordSize));
3274 Address in2_lo(ESP, second.GetStackIndex());
3275
3276 __ movl(eax, in2_hi);
3277 // eax <- in1.lo * in2.hi
3278 __ imull(eax, in1_lo);
3279 // in1.hi <- in1.hi * in2.lo
3280 __ imull(in1_hi, in2_lo);
3281 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3282 __ addl(in1_hi, eax);
3283              // Move in1_lo to eax to set up the widening 32x32->64 multiply.
3284 __ movl(eax, in1_lo);
3285 // edx:eax <- in1.lo * in2.lo
3286 __ mull(in2_lo);
3287 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3288 __ addl(in1_hi, edx);
3289 // in1.lo <- (in1.lo * in2.lo)[31:0];
3290 __ movl(in1_lo, eax);
3291 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003292
3293 break;
3294 }
3295
Calin Juravleb5bfa962014-10-21 18:02:24 +01003296 case Primitive::kPrimFloat: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003297 DCHECK(first.Equals(locations->Out()));
3298 if (second.IsFpuRegister()) {
3299 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3300 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3301 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003302 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003303 __ mulss(first.AsFpuRegister<XmmRegister>(),
3304 codegen_->LiteralFloatAddress(
3305 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3306 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3307 } else {
3308 DCHECK(second.IsStackSlot());
3309 __ mulss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3310 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003311 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003312 }
3313
3314 case Primitive::kPrimDouble: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003315 DCHECK(first.Equals(locations->Out()));
3316 if (second.IsFpuRegister()) {
3317 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3318 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3319 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003320 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003321 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3322 codegen_->LiteralDoubleAddress(
3323 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3324 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3325 } else {
3326 DCHECK(second.IsDoubleStackSlot());
3327 __ mulsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3328 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003329 break;
3330 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003331
3332 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003333 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003334 }
3335}
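
// The kPrimLong multiply above follows the decomposition spelled out in its inline comments:
// only three 32x32 multiplies are needed because every bit at or above 2^64 is discarded.
// A standalone sketch of the same computation (illustration only; the helper name is
// invented, and unsigned arithmetic stands in for the wrapping done by imull/mull/addl):
static inline uint64_t Mul64Sketch(uint32_t a_lo, uint32_t a_hi, uint32_t b_lo, uint32_t b_hi) {
  uint32_t hi = a_lo * b_hi + a_hi * b_lo;                    // the two imull + addl
  uint64_t lo_product = static_cast<uint64_t>(a_lo) * b_lo;   // mull: edx:eax
  hi += static_cast<uint32_t>(lo_product >> 32);              // addl of edx into the high word
  uint32_t lo = static_cast<uint32_t>(lo_product);            // eax becomes the low word
  return (static_cast<uint64_t>(hi) << 32) | lo;
}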
3336
Roland Levillain232ade02015-04-20 15:14:36 +01003337void InstructionCodeGeneratorX86::PushOntoFPStack(Location source,
3338 uint32_t temp_offset,
3339 uint32_t stack_adjustment,
3340 bool is_fp,
3341 bool is_wide) {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003342 if (source.IsStackSlot()) {
Roland Levillain232ade02015-04-20 15:14:36 +01003343 DCHECK(!is_wide);
3344 if (is_fp) {
3345 __ flds(Address(ESP, source.GetStackIndex() + stack_adjustment));
3346 } else {
3347 __ filds(Address(ESP, source.GetStackIndex() + stack_adjustment));
3348 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003349 } else if (source.IsDoubleStackSlot()) {
Roland Levillain232ade02015-04-20 15:14:36 +01003350 DCHECK(is_wide);
3351 if (is_fp) {
3352 __ fldl(Address(ESP, source.GetStackIndex() + stack_adjustment));
3353 } else {
3354 __ fildl(Address(ESP, source.GetStackIndex() + stack_adjustment));
3355 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003356 } else {
3357      // Write the value to a stack temporary, then load it onto the x87 FP stack.
Roland Levillain232ade02015-04-20 15:14:36 +01003358 if (!is_wide) {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003359 Location stack_temp = Location::StackSlot(temp_offset);
3360 codegen_->Move32(stack_temp, source);
Roland Levillain232ade02015-04-20 15:14:36 +01003361 if (is_fp) {
3362 __ flds(Address(ESP, temp_offset));
3363 } else {
3364 __ filds(Address(ESP, temp_offset));
3365 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003366 } else {
3367 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3368 codegen_->Move64(stack_temp, source);
Roland Levillain232ade02015-04-20 15:14:36 +01003369 if (is_fp) {
3370 __ fldl(Address(ESP, temp_offset));
3371 } else {
3372 __ fildl(Address(ESP, temp_offset));
3373 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003374 }
3375 }
3376}
3377
3378void InstructionCodeGeneratorX86::GenerateRemFP(HRem *rem) {
3379 Primitive::Type type = rem->GetResultType();
3380 bool is_float = type == Primitive::kPrimFloat;
3381 size_t elem_size = Primitive::ComponentSize(type);
3382 LocationSummary* locations = rem->GetLocations();
3383 Location first = locations->InAt(0);
3384 Location second = locations->InAt(1);
3385 Location out = locations->Out();
3386
3387 // Create stack space for 2 elements.
3388 // TODO: enhance register allocator to ask for stack temporaries.
3389 __ subl(ESP, Immediate(2 * elem_size));
3390
3391 // Load the values to the FP stack in reverse order, using temporaries if needed.
Roland Levillain232ade02015-04-20 15:14:36 +01003392 const bool is_wide = !is_float;
3393 PushOntoFPStack(second, elem_size, 2 * elem_size, /* is_fp */ true, is_wide);
3394 PushOntoFPStack(first, 0, 2 * elem_size, /* is_fp */ true, is_wide);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003395
3396      // Loop on FPREM until the argument reduction is complete.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003397 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003398 __ Bind(&retry);
3399 __ fprem();
3400
3401 // Move FP status to AX.
3402 __ fstsw();
3403
3404 // And see if the argument reduction is complete. This is signaled by the
3405 // C2 FPU flag bit set to 0.
3406 __ andl(EAX, Immediate(kC2ConditionMask));
3407 __ j(kNotEqual, &retry);
3408
3409 // We have settled on the final value. Retrieve it into an XMM register.
3410 // Store FP top of stack to real stack.
3411 if (is_float) {
3412 __ fsts(Address(ESP, 0));
3413 } else {
3414 __ fstl(Address(ESP, 0));
3415 }
3416
3417 // Pop the 2 items from the FP stack.
3418 __ fucompp();
3419
3420 // Load the value from the stack into an XMM register.
3421 DCHECK(out.IsFpuRegister()) << out;
3422 if (is_float) {
3423 __ movss(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3424 } else {
3425 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3426 }
3427
3428 // And remove the temporary stack space we allocated.
3429 __ addl(ESP, Immediate(2 * elem_size));
3430}
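
// FPREM computes a partial remainder using truncating division and keeps the C2 flag set
// while further reduction steps are needed, so the loop above converges to the same value
// as C/C++ fmod; that is also how the Java '%' operator is defined for float and double
// (the result takes the sign of the dividend). A reference sketch of the final value,
// assuming <cmath> is available (illustration only, not the code path emitted here):
static inline double JavaFRemReference(double dividend, double divisor) {
  return std::fmod(dividend, divisor);
}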
3431
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003432
3433void InstructionCodeGeneratorX86::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3434 DCHECK(instruction->IsDiv() || instruction->IsRem());
3435
3436 LocationSummary* locations = instruction->GetLocations();
3437 DCHECK(locations->InAt(1).IsConstant());
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003438 DCHECK(locations->InAt(1).GetConstant()->IsIntConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003439
3440 Register out_register = locations->Out().AsRegister<Register>();
3441 Register input_register = locations->InAt(0).AsRegister<Register>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003442 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003443
3444 DCHECK(imm == 1 || imm == -1);
3445
3446 if (instruction->IsRem()) {
3447 __ xorl(out_register, out_register);
3448 } else {
3449 __ movl(out_register, input_register);
3450 if (imm == -1) {
3451 __ negl(out_register);
3452 }
3453 }
3454}
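
// For a constant divisor of 1 or -1 no division is needed: the remainder is always 0 and
// the quotient is the numerator, negated when the divisor is -1. A sketch of the values the
// code above produces (illustration only; unsigned negation mirrors the wraparound of negl,
// which keeps INT32_MIN / -1 well defined):
static inline int32_t DivByOneOrMinusOneSketch(int32_t num, int32_t divisor) {
  uint32_t q = static_cast<uint32_t>(num);
  return static_cast<int32_t>(divisor == -1 ? 0u - q : q);
}
// The corresponding remainder is 0 for every numerator, hence the single xorl above.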
3455
3456
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003457void InstructionCodeGeneratorX86::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003458 LocationSummary* locations = instruction->GetLocations();
3459
3460 Register out_register = locations->Out().AsRegister<Register>();
3461 Register input_register = locations->InAt(0).AsRegister<Register>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003462 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003463 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3464 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003465
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003466 Register num = locations->GetTemp(0).AsRegister<Register>();
3467
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003468 __ leal(num, Address(input_register, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003469 __ testl(input_register, input_register);
3470 __ cmovl(kGreaterEqual, num, input_register);
3471 int shift = CTZ(imm);
3472 __ sarl(num, Immediate(shift));
3473
3474 if (imm < 0) {
3475 __ negl(num);
3476 }
3477
3478 __ movl(out_register, num);
3479}
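
// An arithmetic shift alone rounds toward negative infinity, while Java integer division
// rounds toward zero; the leal/cmovl pair therefore biases negative numerators by |d| - 1
// before the sarl. A sketch of the whole sequence for a power-of-two divisor (illustration
// only; the helper name is invented, CTZ is the same utility used above, and an arithmetic
// right shift of negative values is assumed, matching sarl):
static inline int32_t DivByPowerOfTwoSketch(int32_t num, int32_t divisor) {
  uint32_t abs_d = divisor < 0 ? -static_cast<uint32_t>(divisor) : static_cast<uint32_t>(divisor);
  int shift = CTZ(abs_d);
  int32_t biased = num < 0 ? num + static_cast<int32_t>(abs_d - 1) : num;  // leal + cmovl
  int32_t q = biased >> shift;                                             // sarl
  return divisor < 0 ? -q : q;                                             // negl if d < 0
}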
3480
3481void InstructionCodeGeneratorX86::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3482 DCHECK(instruction->IsDiv() || instruction->IsRem());
3483
3484 LocationSummary* locations = instruction->GetLocations();
3485 int imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
3486
3487 Register eax = locations->InAt(0).AsRegister<Register>();
3488 Register out = locations->Out().AsRegister<Register>();
3489 Register num;
3490 Register edx;
3491
3492 if (instruction->IsDiv()) {
3493 edx = locations->GetTemp(0).AsRegister<Register>();
3494 num = locations->GetTemp(1).AsRegister<Register>();
3495 } else {
3496 edx = locations->Out().AsRegister<Register>();
3497 num = locations->GetTemp(0).AsRegister<Register>();
3498 }
3499
3500 DCHECK_EQ(EAX, eax);
3501 DCHECK_EQ(EDX, edx);
3502 if (instruction->IsDiv()) {
3503 DCHECK_EQ(EAX, out);
3504 } else {
3505 DCHECK_EQ(EDX, out);
3506 }
3507
3508 int64_t magic;
3509 int shift;
3510 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3511
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003512 // Save the numerator.
3513 __ movl(num, eax);
3514
3515 // EAX = magic
3516 __ movl(eax, Immediate(magic));
3517
3518 // EDX:EAX = magic * numerator
3519 __ imull(num);
3520
3521 if (imm > 0 && magic < 0) {
3522 // EDX += num
3523 __ addl(edx, num);
3524 } else if (imm < 0 && magic > 0) {
3525 __ subl(edx, num);
3526 }
3527
3528 // Shift if needed.
3529 if (shift != 0) {
3530 __ sarl(edx, Immediate(shift));
3531 }
3532
3533 // EDX += 1 if EDX < 0
3534 __ movl(eax, edx);
3535 __ shrl(edx, Immediate(31));
3536 __ addl(edx, eax);
3537
3538 if (instruction->IsRem()) {
3539 __ movl(eax, num);
3540 __ imull(edx, Immediate(imm));
3541 __ subl(eax, edx);
3542 __ movl(edx, eax);
3543 } else {
3544 __ movl(eax, edx);
3545 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003546}
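
// The sequence above is the usual reciprocal-multiplication scheme: multiply by a
// precomputed magic constant, keep the high 32 bits, add or subtract the numerator when the
// magic and the divisor disagree in sign, arithmetic-shift, and finally add the sign bit so
// the quotient rounds toward zero. A sketch that mirrors the emitted instructions for one
// concrete divisor, 7; the magic/shift pair below is the commonly quoted one (an assumption
// for illustration - the compiler takes its values from CalculateMagicAndShiftForDivRem),
// and arithmetic right shifts of negative values are assumed:
static inline int32_t DivBy7Sketch(int32_t num) {
  const int32_t magic = static_cast<int32_t>(0x92492493);
  const int shift = 2;
  int64_t product = static_cast<int64_t>(magic) * num;                  // one-operand imull: edx:eax
  int32_t quotient = static_cast<int32_t>(product >> 32);               // keep edx
  quotient += num;                                                      // magic < 0, divisor > 0: addl
  quotient >>= shift;                                                   // sarl
  quotient += static_cast<int32_t>(static_cast<uint32_t>(quotient) >> 31);  // +1 if negative
  return quotient;
}
// The Rem path then computes num - quotient * 7 with imull/subl, exactly as above.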
3547
Calin Juravlebacfec32014-11-14 15:54:36 +00003548void InstructionCodeGeneratorX86::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3549 DCHECK(instruction->IsDiv() || instruction->IsRem());
3550
3551 LocationSummary* locations = instruction->GetLocations();
3552 Location out = locations->Out();
3553 Location first = locations->InAt(0);
3554 Location second = locations->InAt(1);
3555 bool is_div = instruction->IsDiv();
3556
3557 switch (instruction->GetResultType()) {
3558 case Primitive::kPrimInt: {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003559 DCHECK_EQ(EAX, first.AsRegister<Register>());
3560 DCHECK_EQ(is_div ? EAX : EDX, out.AsRegister<Register>());
Calin Juravlebacfec32014-11-14 15:54:36 +00003561
Vladimir Marko13c86fd2015-11-11 12:37:46 +00003562 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003563 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003564
3565 if (imm == 0) {
3566            // Do not generate anything for 0: the preceding DivZeroCheck throws, so this code is unreachable.
3567 } else if (imm == 1 || imm == -1) {
3568 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003569 } else if (is_div && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003570 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003571 } else {
3572 DCHECK(imm <= -2 || imm >= 2);
3573 GenerateDivRemWithAnyConstant(instruction);
3574 }
3575 } else {
David Srbecky9cd6d372016-02-09 15:24:47 +00003576 SlowPathCode* slow_path = new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86(
3577 instruction, out.AsRegister<Register>(), is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003578 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003579
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003580 Register second_reg = second.AsRegister<Register>();
3581 // 0x80000000/-1 triggers an arithmetic exception!
3582        // Dividing by -1 is actually negation, and -0x80000000 == 0x80000000 in 32 bits, so
3583 // it's safe to just use negl instead of more complex comparisons.
Calin Juravlebacfec32014-11-14 15:54:36 +00003584
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003585 __ cmpl(second_reg, Immediate(-1));
3586 __ j(kEqual, slow_path->GetEntryLabel());
Calin Juravlebacfec32014-11-14 15:54:36 +00003587
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003588 // edx:eax <- sign-extended of eax
3589 __ cdq();
3590 // eax = quotient, edx = remainder
3591 __ idivl(second_reg);
3592 __ Bind(slow_path->GetExitLabel());
3593 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003594 break;
3595 }
3596
3597 case Primitive::kPrimLong: {
3598 InvokeRuntimeCallingConvention calling_convention;
3599 DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
3600 DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
3601 DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
3602 DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
3603 DCHECK_EQ(EAX, out.AsRegisterPairLow<Register>());
3604 DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());
3605
3606 if (is_div) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003607 codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003608 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00003609 } else {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003610 codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003611 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00003612 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003613 break;
3614 }
3615
3616 default:
3617 LOG(FATAL) << "Unexpected type for GenerateDivRemIntegral " << instruction->GetResultType();
3618 }
3619}
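
// The cmpl/j(kEqual) guard in the kPrimInt case exists because idivl raises a #DE fault for
// INT32_MIN / -1 (the quotient 2^31 does not fit), whereas Java defines that case as
// INT32_MIN with remainder 0; the slow path produces exactly those values via negl. A sketch
// of the defined quotient (illustration only; unsigned negation keeps the C++ itself free of
// signed overflow, and a non-zero divisor is guaranteed by the preceding HDivZeroCheck):
static inline int32_t JavaDivSketch(int32_t num, int32_t divisor) {
  if (divisor == -1) {
    return static_cast<int32_t>(0u - static_cast<uint32_t>(num));  // covers INT32_MIN / -1
  }
  return num / divisor;  // C++ truncation matches Java for all remaining inputs
}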
3620
Calin Juravle7c4954d2014-10-28 16:57:40 +00003621void LocationsBuilderX86::VisitDiv(HDiv* div) {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003622 LocationSummary::CallKind call_kind = (div->GetResultType() == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003623 ? LocationSummary::kCallOnMainOnly
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003624 : LocationSummary::kNoCall;
3625 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
3626
Calin Juravle7c4954d2014-10-28 16:57:40 +00003627 switch (div->GetResultType()) {
Calin Juravled0d48522014-11-04 16:40:20 +00003628 case Primitive::kPrimInt: {
3629 locations->SetInAt(0, Location::RegisterLocation(EAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003630 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003631 locations->SetOut(Location::SameAsFirstInput());
3632        // The x86 idiv instruction takes its dividend in edx:eax.
3633 locations->AddTemp(Location::RegisterLocation(EDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003634      // We need to save the numerator while we tweak eax and edx. Since the one-operand imul
3635      // forces its result into EAX and EDX, things are simpler if we also use EAX as the
3636      // output and request another temp.
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003637 if (div->InputAt(1)->IsIntConstant()) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003638 locations->AddTemp(Location::RequiresRegister());
3639 }
Calin Juravled0d48522014-11-04 16:40:20 +00003640 break;
3641 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003642 case Primitive::kPrimLong: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003643 InvokeRuntimeCallingConvention calling_convention;
3644 locations->SetInAt(0, Location::RegisterPairLocation(
3645 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3646 locations->SetInAt(1, Location::RegisterPairLocation(
3647 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3648 // Runtime helper puts the result in EAX, EDX.
3649 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
Calin Juravle7c4954d2014-10-28 16:57:40 +00003650 break;
3651 }
3652 case Primitive::kPrimFloat:
3653 case Primitive::kPrimDouble: {
3654 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003655 if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3656 DCHECK(div->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003657 } else if (div->InputAt(1)->IsConstant()) {
3658 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003659 } else {
3660 locations->SetInAt(1, Location::Any());
3661 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003662 locations->SetOut(Location::SameAsFirstInput());
3663 break;
3664 }
3665
3666 default:
3667 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3668 }
3669}
3670
3671void InstructionCodeGeneratorX86::VisitDiv(HDiv* div) {
3672 LocationSummary* locations = div->GetLocations();
3673 Location first = locations->InAt(0);
3674 Location second = locations->InAt(1);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003675
3676 switch (div->GetResultType()) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003677 case Primitive::kPrimInt:
Calin Juravle7c4954d2014-10-28 16:57:40 +00003678 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003679 GenerateDivRemIntegral(div);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003680 break;
3681 }
3682
3683 case Primitive::kPrimFloat: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003684 if (second.IsFpuRegister()) {
3685 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3686 } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3687 HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003688 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003689 __ divss(first.AsFpuRegister<XmmRegister>(),
3690 codegen_->LiteralFloatAddress(
3691 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3692 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3693 } else {
3694 DCHECK(second.IsStackSlot());
3695 __ divss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3696 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003697 break;
3698 }
3699
3700 case Primitive::kPrimDouble: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003701 if (second.IsFpuRegister()) {
3702 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3703 } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3704 HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003705 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003706 __ divsd(first.AsFpuRegister<XmmRegister>(),
3707 codegen_->LiteralDoubleAddress(
3708 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3709 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3710 } else {
3711 DCHECK(second.IsDoubleStackSlot());
3712 __ divsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3713 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003714 break;
3715 }
3716
3717 default:
3718 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3719 }
3720}
3721
Calin Juravlebacfec32014-11-14 15:54:36 +00003722void LocationsBuilderX86::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003723 Primitive::Type type = rem->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003724
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003725 LocationSummary::CallKind call_kind = (rem->GetResultType() == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003726 ? LocationSummary::kCallOnMainOnly
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003727 : LocationSummary::kNoCall;
3728 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
Calin Juravlebacfec32014-11-14 15:54:36 +00003729
Calin Juravled2ec87d2014-12-08 14:24:46 +00003730 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003731 case Primitive::kPrimInt: {
3732 locations->SetInAt(0, Location::RegisterLocation(EAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003733 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003734 locations->SetOut(Location::RegisterLocation(EDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003735      // We need to save the numerator while we tweak eax and edx. Since the one-operand imul
3736      // forces its result into EAX and EDX, things are simpler if we also use EDX as the
3737      // output and request another temp.
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003738 if (rem->InputAt(1)->IsIntConstant()) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003739 locations->AddTemp(Location::RequiresRegister());
3740 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003741 break;
3742 }
3743 case Primitive::kPrimLong: {
3744 InvokeRuntimeCallingConvention calling_convention;
3745 locations->SetInAt(0, Location::RegisterPairLocation(
3746 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3747 locations->SetInAt(1, Location::RegisterPairLocation(
3748 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3749 // Runtime helper puts the result in EAX, EDX.
3750 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
3751 break;
3752 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003753 case Primitive::kPrimDouble:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003754 case Primitive::kPrimFloat: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003755 locations->SetInAt(0, Location::Any());
3756 locations->SetInAt(1, Location::Any());
3757 locations->SetOut(Location::RequiresFpuRegister());
3758 locations->AddTemp(Location::RegisterLocation(EAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003759 break;
3760 }
3761
3762 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003763 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003764 }
3765}
3766
3767void InstructionCodeGeneratorX86::VisitRem(HRem* rem) {
3768 Primitive::Type type = rem->GetResultType();
3769 switch (type) {
3770 case Primitive::kPrimInt:
3771 case Primitive::kPrimLong: {
3772 GenerateDivRemIntegral(rem);
3773 break;
3774 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003775 case Primitive::kPrimFloat:
Calin Juravlebacfec32014-11-14 15:54:36 +00003776 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003777 GenerateRemFP(rem);
Calin Juravlebacfec32014-11-14 15:54:36 +00003778 break;
3779 }
3780 default:
3781 LOG(FATAL) << "Unexpected rem type " << type;
3782 }
3783}
3784
Calin Juravled0d48522014-11-04 16:40:20 +00003785void LocationsBuilderX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003786 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003787 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003788 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003789 case Primitive::kPrimByte:
3790 case Primitive::kPrimChar:
3791 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003792 case Primitive::kPrimInt: {
3793 locations->SetInAt(0, Location::Any());
3794 break;
3795 }
3796 case Primitive::kPrimLong: {
3797 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
3798 if (!instruction->IsConstant()) {
3799 locations->AddTemp(Location::RequiresRegister());
3800 }
3801 break;
3802 }
3803 default:
3804 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
3805 }
Calin Juravled0d48522014-11-04 16:40:20 +00003806}
3807
3808void InstructionCodeGeneratorX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003809 SlowPathCode* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00003810 codegen_->AddSlowPath(slow_path);
3811
3812 LocationSummary* locations = instruction->GetLocations();
3813 Location value = locations->InAt(0);
3814
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003815 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003816 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003817 case Primitive::kPrimByte:
3818 case Primitive::kPrimChar:
3819 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003820 case Primitive::kPrimInt: {
3821 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003822 __ testl(value.AsRegister<Register>(), value.AsRegister<Register>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003823 __ j(kEqual, slow_path->GetEntryLabel());
3824 } else if (value.IsStackSlot()) {
3825 __ cmpl(Address(ESP, value.GetStackIndex()), Immediate(0));
3826 __ j(kEqual, slow_path->GetEntryLabel());
3827 } else {
3828 DCHECK(value.IsConstant()) << value;
3829 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01003830 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003831 }
3832 }
3833 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003834 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003835 case Primitive::kPrimLong: {
3836 if (value.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003837 Register temp = locations->GetTemp(0).AsRegister<Register>();
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003838 __ movl(temp, value.AsRegisterPairLow<Register>());
3839 __ orl(temp, value.AsRegisterPairHigh<Register>());
3840 __ j(kEqual, slow_path->GetEntryLabel());
3841 } else {
3842 DCHECK(value.IsConstant()) << value;
3843 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3844 __ jmp(slow_path->GetEntryLabel());
3845 }
3846 }
3847 break;
3848 }
3849 default:
3850 LOG(FATAL) << "Unexpected type for HDivZeroCheck" << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003851 }
Calin Juravled0d48522014-11-04 16:40:20 +00003852}
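
// For kPrimLong the zero test above ors the two halves into a temporary, since a 64-bit
// value is zero exactly when both 32-bit words are zero. A one-line sketch of that test
// (illustration only):
static inline bool IsZero64Sketch(uint32_t lo, uint32_t hi) {
  return (lo | hi) == 0u;  // movl + orl, then a single flags check
}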
3853
Calin Juravle9aec02f2014-11-18 23:06:35 +00003854void LocationsBuilderX86::HandleShift(HBinaryOperation* op) {
3855 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3856
3857 LocationSummary* locations =
3858 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3859
3860 switch (op->GetResultType()) {
Mark P Mendell73945692015-04-29 14:56:17 +00003861 case Primitive::kPrimInt:
Calin Juravle9aec02f2014-11-18 23:06:35 +00003862 case Primitive::kPrimLong: {
Mark P Mendell73945692015-04-29 14:56:17 +00003863      // Can't combine a Location::Any() input with a SameAsFirstInput() output.
Calin Juravle9aec02f2014-11-18 23:06:35 +00003864 locations->SetInAt(0, Location::RequiresRegister());
Mark P Mendell73945692015-04-29 14:56:17 +00003865 // The shift count needs to be in CL or a constant.
3866 locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, op->InputAt(1)));
Calin Juravle9aec02f2014-11-18 23:06:35 +00003867 locations->SetOut(Location::SameAsFirstInput());
3868 break;
3869 }
3870 default:
3871 LOG(FATAL) << "Unexpected op type " << op->GetResultType();
3872 }
3873}
3874
3875void InstructionCodeGeneratorX86::HandleShift(HBinaryOperation* op) {
3876 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3877
3878 LocationSummary* locations = op->GetLocations();
3879 Location first = locations->InAt(0);
3880 Location second = locations->InAt(1);
3881 DCHECK(first.Equals(locations->Out()));
3882
3883 switch (op->GetResultType()) {
3884 case Primitive::kPrimInt: {
Mark P Mendell73945692015-04-29 14:56:17 +00003885 DCHECK(first.IsRegister());
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003886 Register first_reg = first.AsRegister<Register>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003887 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003888 Register second_reg = second.AsRegister<Register>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003889 DCHECK_EQ(ECX, second_reg);
3890 if (op->IsShl()) {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003891 __ shll(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003892 } else if (op->IsShr()) {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003893 __ sarl(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003894 } else {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003895 __ shrl(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003896 }
3897 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003898 int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance;
Mark P Mendell73945692015-04-29 14:56:17 +00003899 if (shift == 0) {
3900 return;
3901 }
3902 Immediate imm(shift);
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003903 if (op->IsShl()) {
3904 __ shll(first_reg, imm);
3905 } else if (op->IsShr()) {
3906 __ sarl(first_reg, imm);
3907 } else {
3908 __ shrl(first_reg, imm);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003909 }
3910 }
3911 break;
3912 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003913 case Primitive::kPrimLong: {
Mark P Mendell73945692015-04-29 14:56:17 +00003914 if (second.IsRegister()) {
3915 Register second_reg = second.AsRegister<Register>();
3916 DCHECK_EQ(ECX, second_reg);
3917 if (op->IsShl()) {
3918 GenerateShlLong(first, second_reg);
3919 } else if (op->IsShr()) {
3920 GenerateShrLong(first, second_reg);
3921 } else {
3922 GenerateUShrLong(first, second_reg);
3923 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003924 } else {
Mark P Mendell73945692015-04-29 14:56:17 +00003925 // Shift by a constant.
Roland Levillain5b5b9312016-03-22 14:57:31 +00003926 int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
Mark P Mendell73945692015-04-29 14:56:17 +00003927 // Nothing to do if the shift is 0, as the input is already the output.
3928 if (shift != 0) {
3929 if (op->IsShl()) {
3930 GenerateShlLong(first, shift);
3931 } else if (op->IsShr()) {
3932 GenerateShrLong(first, shift);
3933 } else {
3934 GenerateUShrLong(first, shift);
3935 }
3936 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003937 }
3938 break;
3939 }
Calin Juravle9aec02f2014-11-18 23:06:35 +00003940 default:
3941 LOG(FATAL) << "Unexpected op type " << op->GetResultType();
3942 }
3943}
3944
Mark P Mendell73945692015-04-29 14:56:17 +00003945void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, int shift) {
3946 Register low = loc.AsRegisterPairLow<Register>();
3947 Register high = loc.AsRegisterPairHigh<Register>();
Mark Mendellba56d062015-05-05 21:34:03 -04003948 if (shift == 1) {
3949 // This is just an addition.
3950 __ addl(low, low);
3951 __ adcl(high, high);
3952 } else if (shift == 32) {
Mark P Mendell73945692015-04-29 14:56:17 +00003953 // Shift by 32 is easy. High gets low, and low gets 0.
3954 codegen_->EmitParallelMoves(
3955 loc.ToLow(),
3956 loc.ToHigh(),
3957 Primitive::kPrimInt,
3958 Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
3959 loc.ToLow(),
3960 Primitive::kPrimInt);
3961 } else if (shift > 32) {
3962 // Low part becomes 0. High part is low part << (shift-32).
3963 __ movl(high, low);
3964 __ shll(high, Immediate(shift - 32));
3965 __ xorl(low, low);
3966 } else {
3967 // Between 1 and 31.
3968 __ shld(high, low, Immediate(shift));
3969 __ shll(low, Immediate(shift));
3970 }
3971}
3972
Calin Juravle9aec02f2014-11-18 23:06:35 +00003973void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04003974 NearLabel done;
Calin Juravle9aec02f2014-11-18 23:06:35 +00003975 __ shld(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>(), shifter);
3976 __ shll(loc.AsRegisterPairLow<Register>(), shifter);
3977 __ testl(shifter, Immediate(32));
3978 __ j(kEqual, &done);
3979 __ movl(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>());
3980 __ movl(loc.AsRegisterPairLow<Register>(), Immediate(0));
3981 __ Bind(&done);
3982}
3983
Mark P Mendell73945692015-04-29 14:56:17 +00003984void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, int shift) {
3985 Register low = loc.AsRegisterPairLow<Register>();
3986 Register high = loc.AsRegisterPairHigh<Register>();
3987 if (shift == 32) {
3988 // Need to copy the sign.
3989 DCHECK_NE(low, high);
3990 __ movl(low, high);
3991 __ sarl(high, Immediate(31));
3992 } else if (shift > 32) {
3993 DCHECK_NE(low, high);
3994 // High part becomes sign. Low part is shifted by shift - 32.
3995 __ movl(low, high);
3996 __ sarl(high, Immediate(31));
3997 __ sarl(low, Immediate(shift - 32));
3998 } else {
3999 // Between 1 and 31.
4000 __ shrd(low, high, Immediate(shift));
4001 __ sarl(high, Immediate(shift));
4002 }
4003}
4004
Calin Juravle9aec02f2014-11-18 23:06:35 +00004005void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004006 NearLabel done;
Calin Juravle9aec02f2014-11-18 23:06:35 +00004007 __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
4008 __ sarl(loc.AsRegisterPairHigh<Register>(), shifter);
4009 __ testl(shifter, Immediate(32));
4010 __ j(kEqual, &done);
4011 __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
4012 __ sarl(loc.AsRegisterPairHigh<Register>(), Immediate(31));
4013 __ Bind(&done);
4014}
4015
Mark P Mendell73945692015-04-29 14:56:17 +00004016void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, int shift) {
4017 Register low = loc.AsRegisterPairLow<Register>();
4018 Register high = loc.AsRegisterPairHigh<Register>();
4019 if (shift == 32) {
4020 // Shift by 32 is easy. Low gets high, and high gets 0.
4021 codegen_->EmitParallelMoves(
4022 loc.ToHigh(),
4023 loc.ToLow(),
4024 Primitive::kPrimInt,
4025 Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
4026 loc.ToHigh(),
4027 Primitive::kPrimInt);
4028 } else if (shift > 32) {
4029 // Low part is high >> (shift - 32). High part becomes 0.
4030 __ movl(low, high);
4031 __ shrl(low, Immediate(shift - 32));
4032 __ xorl(high, high);
4033 } else {
4034 // Between 1 and 31.
4035 __ shrd(low, high, Immediate(shift));
4036 __ shrl(high, Immediate(shift));
4037 }
4038}
4039
Calin Juravle9aec02f2014-11-18 23:06:35 +00004040void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004041 NearLabel done;
Calin Juravle9aec02f2014-11-18 23:06:35 +00004042 __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
4043 __ shrl(loc.AsRegisterPairHigh<Register>(), shifter);
4044 __ testl(shifter, Immediate(32));
4045 __ j(kEqual, &done);
4046 __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
4047 __ movl(loc.AsRegisterPairHigh<Register>(), Immediate(0));
4048 __ Bind(&done);
4049}
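
// All of the variable-count long shifts above share one pattern: a funnel shift (shld/shrd)
// plus a plain shift handle counts 1-31, x86 masks 32-bit shift counts to five bits, and the
// testl(shifter, 32) fix-up handles counts 32-63 by moving one word and clearing the other.
// A portable sketch of the logical-right-shift variant (illustration only; the helper name
// is invented):
static inline void UShr64Sketch(uint32_t* lo, uint32_t* hi, uint32_t count) {
  count &= 63;
  uint32_t c = count & 31;                  // what shrd/shrl actually see
  if (c != 0) {                             // shrd/shrl with a count of 0 change nothing
    *lo = (*lo >> c) | (*hi << (32 - c));   // shrd: bits funnel in from the high word
    *hi >>= c;                              // shrl
  }
  if ((count & 32) != 0) {                  // the fix-up emitted after testl/j(kEqual)
    *lo = *hi;
    *hi = 0;
  }
}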
4050
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004051void LocationsBuilderX86::VisitRor(HRor* ror) {
4052 LocationSummary* locations =
4053 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
4054
4055 switch (ror->GetResultType()) {
4056 case Primitive::kPrimLong:
4057 // Add the temporary needed.
4058 locations->AddTemp(Location::RequiresRegister());
4059 FALLTHROUGH_INTENDED;
4060 case Primitive::kPrimInt:
4061 locations->SetInAt(0, Location::RequiresRegister());
4062 // The shift count needs to be in CL (unless it is a constant).
4063 locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, ror->InputAt(1)));
4064 locations->SetOut(Location::SameAsFirstInput());
4065 break;
4066 default:
4067 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4068 UNREACHABLE();
4069 }
4070}
4071
4072void InstructionCodeGeneratorX86::VisitRor(HRor* ror) {
4073 LocationSummary* locations = ror->GetLocations();
4074 Location first = locations->InAt(0);
4075 Location second = locations->InAt(1);
4076
4077 if (ror->GetResultType() == Primitive::kPrimInt) {
4078 Register first_reg = first.AsRegister<Register>();
4079 if (second.IsRegister()) {
4080 Register second_reg = second.AsRegister<Register>();
4081 __ rorl(first_reg, second_reg);
4082 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004083 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004084 __ rorl(first_reg, imm);
4085 }
4086 return;
4087 }
4088
4089 DCHECK_EQ(ror->GetResultType(), Primitive::kPrimLong);
4090 Register first_reg_lo = first.AsRegisterPairLow<Register>();
4091 Register first_reg_hi = first.AsRegisterPairHigh<Register>();
4092 Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
4093 if (second.IsRegister()) {
4094 Register second_reg = second.AsRegister<Register>();
4095 DCHECK_EQ(second_reg, ECX);
4096 __ movl(temp_reg, first_reg_hi);
4097 __ shrd(first_reg_hi, first_reg_lo, second_reg);
4098 __ shrd(first_reg_lo, temp_reg, second_reg);
4099 __ movl(temp_reg, first_reg_hi);
4100 __ testl(second_reg, Immediate(32));
4101 __ cmovl(kNotEqual, first_reg_hi, first_reg_lo);
4102 __ cmovl(kNotEqual, first_reg_lo, temp_reg);
4103 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004104 int32_t shift_amt = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004105 if (shift_amt == 0) {
4106 // Already fine.
4107 return;
4108 }
4109 if (shift_amt == 32) {
4110 // Just swap.
4111 __ movl(temp_reg, first_reg_lo);
4112 __ movl(first_reg_lo, first_reg_hi);
4113 __ movl(first_reg_hi, temp_reg);
4114 return;
4115 }
4116
4117 Immediate imm(shift_amt);
4118    // Save the contents of the low register.
4119 __ movl(temp_reg, first_reg_lo);
4120
4121 // Shift right into low, feeding bits from high.
4122 __ shrd(first_reg_lo, first_reg_hi, imm);
4123
4124 // Shift right into high, feeding bits from the original low.
4125 __ shrd(first_reg_hi, temp_reg, imm);
4126
4127 // Swap if needed.
4128 if (shift_amt > 32) {
4129 __ movl(temp_reg, first_reg_lo);
4130 __ movl(first_reg_lo, first_reg_hi);
4131 __ movl(first_reg_hi, temp_reg);
4132 }
4133 }
4134}
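
// A 64-bit rotate right by n is (v >> n) | (v << (64 - n)); the code above builds it from
// two shrd funnel shifts through a temporary plus a cmovl word swap once the count reaches
// 32. A reference sketch of the semantics, using a 64-bit type purely for illustration even
// though the generated code only ever touches 32-bit registers:
static inline uint64_t RotateRight64Sketch(uint64_t value, uint32_t count) {
  count &= 63;
  if (count == 0) {
    return value;  // avoids the out-of-range 64-bit shift below
  }
  return (value >> count) | (value << (64 - count));
}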
4135
Calin Juravle9aec02f2014-11-18 23:06:35 +00004136void LocationsBuilderX86::VisitShl(HShl* shl) {
4137 HandleShift(shl);
4138}
4139
4140void InstructionCodeGeneratorX86::VisitShl(HShl* shl) {
4141 HandleShift(shl);
4142}
4143
4144void LocationsBuilderX86::VisitShr(HShr* shr) {
4145 HandleShift(shr);
4146}
4147
4148void InstructionCodeGeneratorX86::VisitShr(HShr* shr) {
4149 HandleShift(shr);
4150}
4151
4152void LocationsBuilderX86::VisitUShr(HUShr* ushr) {
4153 HandleShift(ushr);
4154}
4155
4156void InstructionCodeGeneratorX86::VisitUShr(HUShr* ushr) {
4157 HandleShift(ushr);
4158}
4159
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004160void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004161 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004162 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004163 locations->SetOut(Location::RegisterLocation(EAX));
David Brazdil6de19382016-01-08 17:37:10 +00004164 if (instruction->IsStringAlloc()) {
4165 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
4166 } else {
4167 InvokeRuntimeCallingConvention calling_convention;
4168 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00004169 }
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004170}
4171
4172void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004173  // Note: if heap poisoning is enabled, the entry point takes care
4174 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004175 if (instruction->IsStringAlloc()) {
4176 // String is allocated through StringFactory. Call NewEmptyString entry point.
4177 Register temp = instruction->GetLocations()->GetTemp(0).AsRegister<Register>();
Andreas Gampe542451c2016-07-26 09:02:02 -07004178 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00004179 __ fs()->movl(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString)));
4180 __ call(Address(temp, code_offset.Int32Value()));
4181 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4182 } else {
Serban Constantinescuba45db02016-07-12 22:53:02 +01004183 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +00004184 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
David Brazdil6de19382016-01-08 17:37:10 +00004185 DCHECK(!codegen_->IsLeafMethod());
4186 }
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004187}
4188
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004189void LocationsBuilderX86::VisitNewArray(HNewArray* instruction) {
4190 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004191 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004192 locations->SetOut(Location::RegisterLocation(EAX));
4193 InvokeRuntimeCallingConvention calling_convention;
4194 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08004195 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffray69aa6012015-06-09 10:34:25 +01004196 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004197}
4198
4199void InstructionCodeGeneratorX86::VisitNewArray(HNewArray* instruction) {
4200 InvokeRuntimeCallingConvention calling_convention;
Andreas Gampea5b09a62016-11-17 15:21:22 -08004201 __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex().index_));
Roland Levillain4d027112015-07-01 15:41:14 +01004202  // Note: if heap poisoning is enabled, the entry point takes care
4203 // of poisoning the reference.
Serban Constantinescuba45db02016-07-12 22:53:02 +01004204 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00004205 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004206 DCHECK(!codegen_->IsLeafMethod());
4207}
4208
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004209void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004210 LocationSummary* locations =
4211 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya747a392014-04-17 14:56:23 +01004212 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4213 if (location.IsStackSlot()) {
4214 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4215 } else if (location.IsDoubleStackSlot()) {
4216 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004217 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01004218 locations->SetOut(location);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004219}
4220
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004221void InstructionCodeGeneratorX86::VisitParameterValue(
4222 HParameterValue* instruction ATTRIBUTE_UNUSED) {
4223}
4224
4225void LocationsBuilderX86::VisitCurrentMethod(HCurrentMethod* instruction) {
4226 LocationSummary* locations =
4227 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4228 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4229}
4230
4231void InstructionCodeGeneratorX86::VisitCurrentMethod(HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004232}
4233
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004234void LocationsBuilderX86::VisitClassTableGet(HClassTableGet* instruction) {
4235 LocationSummary* locations =
4236 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4237 locations->SetInAt(0, Location::RequiresRegister());
4238 locations->SetOut(Location::RequiresRegister());
4239}
4240
4241void InstructionCodeGeneratorX86::VisitClassTableGet(HClassTableGet* instruction) {
4242 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00004243 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004244 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004245 instruction->GetIndex(), kX86PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004246 __ movl(locations->Out().AsRegister<Register>(),
4247 Address(locations->InAt(0).AsRegister<Register>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004248 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004249 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004250 instruction->GetIndex(), kX86PointerSize));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004251 __ movl(locations->Out().AsRegister<Register>(),
4252 Address(locations->InAt(0).AsRegister<Register>(),
4253 mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
4254 // temp = temp->GetImtEntryAt(method_offset);
4255 __ movl(locations->Out().AsRegister<Register>(),
4256 Address(locations->Out().AsRegister<Register>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004257 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004258}
4259
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004260void LocationsBuilderX86::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004261 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004262 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01004263 locations->SetInAt(0, Location::RequiresRegister());
4264 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffrayb55f8352014-04-07 15:26:35 +01004265}
4266
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004267void InstructionCodeGeneratorX86::VisitNot(HNot* not_) {
4268 LocationSummary* locations = not_->GetLocations();
Roland Levillain70566432014-10-24 16:20:17 +01004269 Location in = locations->InAt(0);
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01004270 Location out = locations->Out();
Roland Levillain70566432014-10-24 16:20:17 +01004271 DCHECK(in.Equals(out));
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004272 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004273 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004274 __ notl(out.AsRegister<Register>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004275 break;
4276
4277 case Primitive::kPrimLong:
Roland Levillain70566432014-10-24 16:20:17 +01004278 __ notl(out.AsRegisterPairLow<Register>());
4279 __ notl(out.AsRegisterPairHigh<Register>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004280 break;
4281
4282 default:
4283 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4284 }
Nicolas Geoffrayb55f8352014-04-07 15:26:35 +01004285}
4286
David Brazdil66d126e2015-04-03 16:02:44 +01004287void LocationsBuilderX86::VisitBooleanNot(HBooleanNot* bool_not) {
4288 LocationSummary* locations =
4289 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4290 locations->SetInAt(0, Location::RequiresRegister());
4291 locations->SetOut(Location::SameAsFirstInput());
4292}
4293
4294void InstructionCodeGeneratorX86::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004295 LocationSummary* locations = bool_not->GetLocations();
4296 Location in = locations->InAt(0);
4297 Location out = locations->Out();
4298 DCHECK(in.Equals(out));
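  // Booleans are materialized as 0 or 1, so flipping bit 0 implements logical not.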
4299 __ xorl(out.AsRegister<Register>(), Immediate(1));
4300}
4301
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004302void LocationsBuilderX86::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004303 LocationSummary* locations =
4304 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00004305 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00004306 case Primitive::kPrimBoolean:
4307 case Primitive::kPrimByte:
4308 case Primitive::kPrimShort:
4309 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08004310 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00004311 case Primitive::kPrimLong: {
4312 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravleddb7df22014-11-25 20:56:51 +00004313 locations->SetInAt(1, Location::Any());
4314 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4315 break;
4316 }
4317 case Primitive::kPrimFloat:
4318 case Primitive::kPrimDouble: {
4319 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004320 if (compare->InputAt(1)->IsX86LoadFromConstantTable()) {
4321 DCHECK(compare->InputAt(1)->IsEmittedAtUseSite());
4322 } else if (compare->InputAt(1)->IsConstant()) {
4323 locations->SetInAt(1, Location::RequiresFpuRegister());
4324 } else {
4325 locations->SetInAt(1, Location::Any());
4326 }
Calin Juravleddb7df22014-11-25 20:56:51 +00004327 locations->SetOut(Location::RequiresRegister());
4328 break;
4329 }
4330 default:
4331 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
4332 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004333}
4334
4335void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004336 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004337 Register out = locations->Out().AsRegister<Register>();
Calin Juravleddb7df22014-11-25 20:56:51 +00004338 Location left = locations->InAt(0);
4339 Location right = locations->InAt(1);
4340
Mark Mendell0c9497d2015-08-21 09:30:05 -04004341 NearLabel less, greater, done;
Aart Bika19616e2016-02-01 18:57:58 -08004342 Condition less_cond = kLess;
4343
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004344 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00004345 case Primitive::kPrimBoolean:
4346 case Primitive::kPrimByte:
4347 case Primitive::kPrimShort:
4348 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08004349 case Primitive::kPrimInt: {
Roland Levillain0b671c02016-08-19 12:02:34 +01004350 codegen_->GenerateIntCompare(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08004351 break;
4352 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004353 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004354 Register left_low = left.AsRegisterPairLow<Register>();
4355 Register left_high = left.AsRegisterPairHigh<Register>();
4356 int32_t val_low = 0;
4357 int32_t val_high = 0;
4358 bool right_is_const = false;
4359
4360 if (right.IsConstant()) {
4361 DCHECK(right.GetConstant()->IsLongConstant());
4362 right_is_const = true;
4363 int64_t val = right.GetConstant()->AsLongConstant()->GetValue();
4364 val_low = Low32Bits(val);
4365 val_high = High32Bits(val);
4366 }
4367
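      // Compare the high halves first (signed); only if they are equal do we fall
      // through to an unsigned compare of the low halves (hence less_cond becomes kBelow).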
Calin Juravleddb7df22014-11-25 20:56:51 +00004368 if (right.IsRegisterPair()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004369 __ cmpl(left_high, right.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004370 } else if (right.IsDoubleStackSlot()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004371 __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004372 } else {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004373 DCHECK(right_is_const) << right;
Aart Bika19616e2016-02-01 18:57:58 -08004374 codegen_->Compare32BitValue(left_high, val_high);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004375 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004376 __ j(kLess, &less); // Signed compare.
4377 __ j(kGreater, &greater); // Signed compare.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004378 if (right.IsRegisterPair()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004379 __ cmpl(left_low, right.AsRegisterPairLow<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004380 } else if (right.IsDoubleStackSlot()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004381 __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004382 } else {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004383 DCHECK(right_is_const) << right;
Aart Bika19616e2016-02-01 18:57:58 -08004384 codegen_->Compare32BitValue(left_low, val_low);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004385 }
Aart Bika19616e2016-02-01 18:57:58 -08004386 less_cond = kBelow; // for CF (unsigned).
Calin Juravleddb7df22014-11-25 20:56:51 +00004387 break;
4388 }
4389 case Primitive::kPrimFloat: {
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004390 GenerateFPCompare(left, right, compare, false);
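      // An unordered result (NaN operand) is resolved by the compare's bias:
      // with gt bias a NaN compares as greater, otherwise as less.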
Calin Juravleddb7df22014-11-25 20:56:51 +00004391 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08004392 less_cond = kBelow; // for CF (floats).
Calin Juravleddb7df22014-11-25 20:56:51 +00004393 break;
4394 }
4395 case Primitive::kPrimDouble: {
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004396 GenerateFPCompare(left, right, compare, true);
Calin Juravleddb7df22014-11-25 20:56:51 +00004397 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08004398 less_cond = kBelow; // for CF (floats).
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004399 break;
4400 }
4401 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00004402 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004403 }
Aart Bika19616e2016-02-01 18:57:58 -08004404
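  // Materialize the three-way result from the flags set by the comparison above
  // (movl does not modify the flags): 0 if equal, 1 if greater, -1 if less.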
Calin Juravleddb7df22014-11-25 20:56:51 +00004405 __ movl(out, Immediate(0));
4406 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08004407 __ j(less_cond, &less);
Calin Juravleddb7df22014-11-25 20:56:51 +00004408
4409 __ Bind(&greater);
4410 __ movl(out, Immediate(1));
4411 __ jmp(&done);
4412
4413 __ Bind(&less);
4414 __ movl(out, Immediate(-1));
4415
4416 __ Bind(&done);
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004417}
4418
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004419void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004420 LocationSummary* locations =
4421 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004422 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01004423 locations->SetInAt(i, Location::Any());
4424 }
4425 locations->SetOut(Location::Any());
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004426}
4427
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004428void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01004429 LOG(FATAL) << "Unreachable";
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004430}
4431
Roland Levillain7c1559a2015-12-15 10:55:36 +00004432void CodeGeneratorX86::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004433 /*
4434 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
4435 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86 memory model.
4436 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4437 */
4438 switch (kind) {
4439 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004440 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004441 break;
4442 }
4443 case MemBarrierKind::kAnyStore:
4444 case MemBarrierKind::kLoadAny:
4445 case MemBarrierKind::kStoreStore: {
4446 // nop
4447 break;
4448 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004449 case MemBarrierKind::kNTStoreStore:
4450 // Non-Temporal Store/Store needs an explicit fence.
4451 MemoryFence(/* non-temporal */ true);
4452 break;
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004453 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004454}
4455
Vladimir Markodc151b22015-10-15 18:02:30 +01004456HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86::GetSupportedInvokeStaticOrDirectDispatch(
4457 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004458 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Nicolas Geoffray15bd2282016-01-05 15:55:41 +00004459 HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
4460
4461 // We disable pc-relative load when there is an irreducible loop, as the optimization
4462 // is incompatible with it.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004463 // TODO: Create as many X86ComputeBaseMethodAddress instructions
4464 // as needed for methods with irreducible loops.
Nicolas Geoffray15bd2282016-01-05 15:55:41 +00004465 if (GetGraph()->HasIrreducibleLoops() &&
4466 (dispatch_info.method_load_kind ==
4467 HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative)) {
4468 dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod;
4469 }
Nicolas Geoffrayc1a42cf2016-12-18 15:52:36 +00004470 return dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01004471}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004472
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004473Register CodeGeneratorX86::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
4474 Register temp) {
4475 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
Vladimir Markoc53c0792015-11-19 15:48:33 +00004476 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004477 if (!invoke->GetLocations()->Intrinsified()) {
4478 return location.AsRegister<Register>();
4479 }
4480 // For intrinsics we allow any location, so it may be on the stack.
4481 if (!location.IsRegister()) {
4482 __ movl(temp, Address(ESP, location.GetStackIndex()));
4483 return temp;
4484 }
4485 // For register locations, check if the register was saved. If so, get it from the stack.
4486 // Note: There is a chance that the register was saved but not overwritten, so we could
 4487 // save one load. However, since this is just an intrinsic slow path, we prefer this
 4488 // simple and more robust approach rather than trying to determine if that's the case.
4489 SlowPathCode* slow_path = GetCurrentSlowPath();
Serguei Katkov288c7a82016-05-16 11:53:15 +06004490 if (slow_path != nullptr) {
4491 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
4492 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
4493 __ movl(temp, Address(ESP, stack_offset));
4494 return temp;
4495 }
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004496 }
4497 return location.AsRegister<Register>();
4498}
4499
Serguei Katkov288c7a82016-05-16 11:53:15 +06004500Location CodeGeneratorX86::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
4501 Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00004502 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4503 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004504 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +00004505 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004506 uint32_t offset =
4507 GetThreadOffset<kX86PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
4508 __ fs()->movl(temp.AsRegister<Register>(), Address::Absolute(offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004509 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004510 }
Vladimir Marko58155012015-08-19 12:49:41 +00004511 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004512 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004513 break;
4514 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
4515 __ movl(temp.AsRegister<Register>(), Immediate(invoke->GetMethodAddress()));
4516 break;
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004517 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
4518 Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
4519 temp.AsRegister<Register>());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004520 __ movl(temp.AsRegister<Register>(), Address(base_reg, kDummy32BitOffset));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004521 // Bind a new fixup label at the end of the "movl" insn.
4522 uint32_t offset = invoke->GetDexCacheArrayOffset();
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004523 __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004524 break;
4525 }
Vladimir Marko58155012015-08-19 12:49:41 +00004526 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00004527 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004528 Register method_reg;
4529 Register reg = temp.AsRegister<Register>();
4530 if (current_method.IsRegister()) {
4531 method_reg = current_method.AsRegister<Register>();
4532 } else {
David Brazdil58282f42016-01-14 12:45:10 +00004533 DCHECK(invoke->GetLocations()->Intrinsified());
Vladimir Marko58155012015-08-19 12:49:41 +00004534 DCHECK(!current_method.IsValid());
4535 method_reg = reg;
4536 __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
4537 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004538 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01004539 __ movl(reg, Address(method_reg,
4540 ArtMethod::DexCacheResolvedMethodsOffset(kX86PointerSize).Int32Value()));
Vladimir Marko40ecb122016-04-06 17:33:41 +01004541 // temp = temp[index_in_cache];
4542 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
4543 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00004544 __ movl(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
4545 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01004546 }
Vladimir Marko58155012015-08-19 12:49:41 +00004547 }
Serguei Katkov288c7a82016-05-16 11:53:15 +06004548 return callee_method;
4549}
4550
4551void CodeGeneratorX86::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
4552 Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);
Vladimir Marko58155012015-08-19 12:49:41 +00004553
4554 switch (invoke->GetCodePtrLocation()) {
4555 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
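      // The callee is the method being compiled, so call our own frame entry label directly.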
4556 __ call(GetFrameEntryLabel());
4557 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004558 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4559 // (callee_method + offset_of_quick_compiled_code)()
4560 __ call(Address(callee_method.AsRegister<Register>(),
4561 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07004562 kX86PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00004563 break;
Mark Mendell09ed1a32015-03-25 08:30:06 -04004564 }
4565
4566 DCHECK(!IsLeafMethod());
Mark Mendell09ed1a32015-03-25 08:30:06 -04004567}
4568
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004569void CodeGeneratorX86::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
4570 Register temp = temp_in.AsRegister<Register>();
4571 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4572 invoke->GetVTableIndex(), kX86PointerSize).Uint32Value();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004573
4574 // Use the calling convention instead of the location of the receiver, as
4575 // intrinsics may have put the receiver in a different register. In the intrinsics
4576 // slow path, the arguments have been moved to the right place, so here we are
4577 // guaranteed that the receiver is the first register of the calling convention.
4578 InvokeDexCallingConvention calling_convention;
4579 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004580 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004581 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004582 __ movl(temp, Address(receiver, class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004583 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004584 // Instead of simply (possibly) unpoisoning `temp` here, we should
4585 // emit a read barrier for the previous class reference load.
4586 // However this is not required in practice, as this is an
4587 // intermediate/temporary reference and because the current
4588 // concurrent copying collector keeps the from-space memory
 4589 // intact/accessible until the end of the marking phase (a future
 4590 // concurrent copying collector may not preserve this property).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004591 __ MaybeUnpoisonHeapReference(temp);
4592 // temp = temp->GetMethodAt(method_offset);
4593 __ movl(temp, Address(temp, method_offset));
4594 // call temp->GetEntryPoint();
4595 __ call(Address(
Andreas Gampe542451c2016-07-26 09:02:02 -07004596 temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004597}
4598
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004599void CodeGeneratorX86::RecordSimplePatch() {
4600 if (GetCompilerOptions().GetIncludePatchInformation()) {
4601 simple_patches_.emplace_back();
4602 __ Bind(&simple_patches_.back());
4603 }
4604}
4605
Vladimir Markoaad75c62016-10-03 08:46:48 +00004606void CodeGeneratorX86::RecordBootStringPatch(HLoadString* load_string) {
4607 DCHECK(GetCompilerOptions().IsBootImage());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004608 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004609 __ Bind(&string_patches_.back().label);
4610}
4611
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004612void CodeGeneratorX86::RecordTypePatch(HLoadClass* load_class) {
Andreas Gampea5b09a62016-11-17 15:21:22 -08004613 type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004614 __ Bind(&type_patches_.back().label);
4615}
4616
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004617Label* CodeGeneratorX86::NewTypeBssEntryPatch(HLoadClass* load_class) {
4618 DCHECK(!GetCompilerOptions().IsBootImage());
4619 type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex().index_);
4620 return &type_patches_.back().label;
4621}
4622
Vladimir Markoaad75c62016-10-03 08:46:48 +00004623Label* CodeGeneratorX86::NewStringBssEntryPatch(HLoadString* load_string) {
4624 DCHECK(!GetCompilerOptions().IsBootImage());
Andreas Gampe8a0128a2016-11-28 07:38:35 -08004625 string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004626 return &string_patches_.back().label;
4627}
4628
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004629Label* CodeGeneratorX86::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
4630 uint32_t element_offset) {
4631 // Add the patch entry and bind its label at the end of the instruction.
4632 pc_relative_dex_cache_patches_.emplace_back(dex_file, element_offset);
4633 return &pc_relative_dex_cache_patches_.back().label;
4634}
4635
Vladimir Markoaad75c62016-10-03 08:46:48 +00004636// The label points to the end of the "movl" (or other) instruction, but the literal offset
 4637// for the patch needs to point to the embedded constant, which occupies the last 4 bytes.
4638constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
4639
4640template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
4641inline void CodeGeneratorX86::EmitPcRelativeLinkerPatches(
4642 const ArenaDeque<PatchInfo<Label>>& infos,
4643 ArenaVector<LinkerPatch>* linker_patches) {
4644 for (const PatchInfo<Label>& info : infos) {
4645 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
4646 linker_patches->push_back(
4647 Factory(literal_offset, &info.dex_file, GetMethodAddressOffset(), info.index));
4648 }
4649}
4650
Vladimir Marko58155012015-08-19 12:49:41 +00004651void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
4652 DCHECK(linker_patches->empty());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004653 size_t size =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004654 pc_relative_dex_cache_patches_.size() +
4655 simple_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004656 string_patches_.size() +
4657 type_patches_.size();
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004658 linker_patches->reserve(size);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004659 EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
4660 linker_patches);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004661 for (const Label& label : simple_patches_) {
4662 uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment;
4663 linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
4664 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00004665 if (!GetCompilerOptions().IsBootImage()) {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004666 EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_patches_, linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004667 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
4668 } else if (GetCompilerOptions().GetCompilePic()) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00004669 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(type_patches_, linker_patches);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004670 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004671 } else {
4672 for (const PatchInfo<Label>& info : type_patches_) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004673 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Markoaad75c62016-10-03 08:46:48 +00004674 linker_patches->push_back(LinkerPatch::TypePatch(literal_offset, &info.dex_file, info.index));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004675 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004676 for (const PatchInfo<Label>& info : string_patches_) {
4677 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
4678 linker_patches->push_back(
4679 LinkerPatch::StringPatch(literal_offset, &info.dex_file, info.index));
4680 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004681 }
Vladimir Marko58155012015-08-19 12:49:41 +00004682}
4683
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004684void CodeGeneratorX86::MarkGCCard(Register temp,
4685 Register card,
4686 Register object,
4687 Register value,
4688 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004689 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004690 if (value_can_be_null) {
4691 __ testl(value, value);
4692 __ j(kEqual, &is_null);
4693 }
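  // Dirty the card for `object`: load the card table base from the thread, shift
  // the object address right by kCardShift, and store the low byte of the card
  // table base at that card. The base is biased so that its least significant
  // byte equals the dirty-card value.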
Andreas Gampe542451c2016-07-26 09:02:02 -07004694 __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86PointerSize>().Int32Value()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004695 __ movl(temp, object);
4696 __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00004697 __ movb(Address(temp, card, TIMES_1, 0),
4698 X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004699 if (value_can_be_null) {
4700 __ Bind(&is_null);
4701 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004702}
4703
Calin Juravle52c48962014-12-16 17:02:57 +00004704void LocationsBuilderX86::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
4705 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain0d5a2812015-11-13 10:07:31 +00004706
4707 bool object_field_get_with_read_barrier =
4708 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004709 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004710 new (GetGraph()->GetArena()) LocationSummary(instruction,
4711 kEmitCompilerReadBarrier ?
4712 LocationSummary::kCallOnSlowPath :
4713 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004714 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004715 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004716 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004717 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004718
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004719 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4720 locations->SetOut(Location::RequiresFpuRegister());
4721 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004722 // The output overlaps in the case of long: we don't want the low move
4723 // to overwrite the object's location. Likewise, in the case of
4724 // an object field get with read barriers enabled, we do not want
4725 // the move to overwrite the object's location, as we need it to emit
4726 // the read barrier.
4727 locations->SetOut(
4728 Location::RequiresRegister(),
4729 (object_field_get_with_read_barrier || instruction->GetType() == Primitive::kPrimLong) ?
4730 Location::kOutputOverlap :
4731 Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004732 }
Calin Juravle52c48962014-12-16 17:02:57 +00004733
4734 if (field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong)) {
4735 // Long values can be loaded atomically into an XMM using movsd.
Roland Levillain7c1559a2015-12-15 10:55:36 +00004736 // So we use an XMM register as a temp to achieve atomicity (first
 4737 // load the value into the XMM temp and then copy the XMM into the
 4738 // output, 32 bits at a time).
Calin Juravle52c48962014-12-16 17:02:57 +00004739 locations->AddTemp(Location::RequiresFpuRegister());
4740 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004741}
4742
Calin Juravle52c48962014-12-16 17:02:57 +00004743void InstructionCodeGeneratorX86::HandleFieldGet(HInstruction* instruction,
4744 const FieldInfo& field_info) {
4745 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004746
Calin Juravle52c48962014-12-16 17:02:57 +00004747 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004748 Location base_loc = locations->InAt(0);
4749 Register base = base_loc.AsRegister<Register>();
Calin Juravle52c48962014-12-16 17:02:57 +00004750 Location out = locations->Out();
4751 bool is_volatile = field_info.IsVolatile();
4752 Primitive::Type field_type = field_info.GetFieldType();
4753 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4754
4755 switch (field_type) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004756 case Primitive::kPrimBoolean: {
Calin Juravle52c48962014-12-16 17:02:57 +00004757 __ movzxb(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004758 break;
4759 }
4760
4761 case Primitive::kPrimByte: {
Calin Juravle52c48962014-12-16 17:02:57 +00004762 __ movsxb(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004763 break;
4764 }
4765
4766 case Primitive::kPrimShort: {
Calin Juravle52c48962014-12-16 17:02:57 +00004767 __ movsxw(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004768 break;
4769 }
4770
4771 case Primitive::kPrimChar: {
Calin Juravle52c48962014-12-16 17:02:57 +00004772 __ movzxw(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004773 break;
4774 }
4775
4776 case Primitive::kPrimInt:
Calin Juravle52c48962014-12-16 17:02:57 +00004777 __ movl(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004778 break;
Roland Levillain7c1559a2015-12-15 10:55:36 +00004779
4780 case Primitive::kPrimNot: {
4781 // /* HeapReference<Object> */ out = *(base + offset)
4782 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00004783 // Note that a potential implicit null check is handled in this
4784 // CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier call.
4785 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00004786 instruction, out, base, offset, /* needs_null_check */ true);
Roland Levillain7c1559a2015-12-15 10:55:36 +00004787 if (is_volatile) {
4788 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4789 }
4790 } else {
4791 __ movl(out.AsRegister<Register>(), Address(base, offset));
4792 codegen_->MaybeRecordImplicitNullCheck(instruction);
4793 if (is_volatile) {
4794 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4795 }
4796 // If read barriers are enabled, emit read barriers other than
4797 // Baker's using a slow path (and also unpoison the loaded
4798 // reference, if heap poisoning is enabled).
4799 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4800 }
4801 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004802 }
4803
4804 case Primitive::kPrimLong: {
Calin Juravle52c48962014-12-16 17:02:57 +00004805 if (is_volatile) {
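        // Atomic 64-bit load: movsd reads the whole field in one access; the value
        // is then moved into the output register pair 32 bits at a time.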
4806 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
4807 __ movsd(temp, Address(base, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004808 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004809 __ movd(out.AsRegisterPairLow<Register>(), temp);
4810 __ psrlq(temp, Immediate(32));
4811 __ movd(out.AsRegisterPairHigh<Register>(), temp);
4812 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004813 DCHECK_NE(base, out.AsRegisterPairLow<Register>());
Calin Juravle52c48962014-12-16 17:02:57 +00004814 __ movl(out.AsRegisterPairLow<Register>(), Address(base, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004815 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004816 __ movl(out.AsRegisterPairHigh<Register>(), Address(base, kX86WordSize + offset));
4817 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004818 break;
4819 }
4820
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004821 case Primitive::kPrimFloat: {
Calin Juravle52c48962014-12-16 17:02:57 +00004822 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004823 break;
4824 }
4825
4826 case Primitive::kPrimDouble: {
Calin Juravle52c48962014-12-16 17:02:57 +00004827 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004828 break;
4829 }
4830
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004831 case Primitive::kPrimVoid:
Calin Juravle52c48962014-12-16 17:02:57 +00004832 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004833 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004834 }
Calin Juravle52c48962014-12-16 17:02:57 +00004835
Roland Levillain7c1559a2015-12-15 10:55:36 +00004836 if (field_type == Primitive::kPrimNot || field_type == Primitive::kPrimLong) {
4837 // Potential implicit null checks, in the case of reference or
4838 // long fields, are handled in the previous switch statement.
4839 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00004840 codegen_->MaybeRecordImplicitNullCheck(instruction);
4841 }
4842
Calin Juravle52c48962014-12-16 17:02:57 +00004843 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00004844 if (field_type == Primitive::kPrimNot) {
4845 // Memory barriers, in the case of references, are also handled
4846 // in the previous switch statement.
4847 } else {
4848 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4849 }
Roland Levillain4d027112015-07-01 15:41:14 +01004850 }
Calin Juravle52c48962014-12-16 17:02:57 +00004851}
4852
4853void LocationsBuilderX86::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
4854 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4855
4856 LocationSummary* locations =
4857 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4858 locations->SetInAt(0, Location::RequiresRegister());
4859 bool is_volatile = field_info.IsVolatile();
4860 Primitive::Type field_type = field_info.GetFieldType();
4861 bool is_byte_type = (field_type == Primitive::kPrimBoolean)
4862 || (field_type == Primitive::kPrimByte);
4863
4864 // The register allocator does not support multiple
4865 // inputs that die at entry with one in a specific register.
4866 if (is_byte_type) {
4867 // Ensure the value is in a byte register.
4868 locations->SetInAt(1, Location::RegisterLocation(EAX));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004869 } else if (Primitive::IsFloatingPointType(field_type)) {
Mark Mendell81489372015-11-04 11:30:41 -05004870 if (is_volatile && field_type == Primitive::kPrimDouble) {
4871 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4872 locations->SetInAt(1, Location::RequiresFpuRegister());
4873 } else {
4874 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4875 }
4876 } else if (is_volatile && field_type == Primitive::kPrimLong) {
4877 // In order to satisfy the semantics of volatile, this must be a single instruction store.
Calin Juravle52c48962014-12-16 17:02:57 +00004878 locations->SetInAt(1, Location::RequiresRegister());
Mark Mendell81489372015-11-04 11:30:41 -05004879
Calin Juravle52c48962014-12-16 17:02:57 +00004880 // 64bits value can be atomically written to an address with movsd and an XMM register.
4881 // We need two XMM registers because there's no easier way to (bit) copy a register pair
4882 // into a single XMM register (we copy each pair part into the XMMs and then interleave them).
4883 // NB: We could make the register allocator understand fp_reg <-> core_reg moves but given the
4884 // isolated cases when we need this it isn't worth adding the extra complexity.
4885 locations->AddTemp(Location::RequiresFpuRegister());
4886 locations->AddTemp(Location::RequiresFpuRegister());
Mark Mendell81489372015-11-04 11:30:41 -05004887 } else {
4888 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4889
4890 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4891 // Temporary registers for the write barrier.
4892 locations->AddTemp(Location::RequiresRegister()); // May be used for reference poisoning too.
4893 // Ensure the card is in a byte register.
4894 locations->AddTemp(Location::RegisterLocation(ECX));
4895 }
Calin Juravle52c48962014-12-16 17:02:57 +00004896 }
4897}
4898
4899void InstructionCodeGeneratorX86::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004900 const FieldInfo& field_info,
4901 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004902 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4903
4904 LocationSummary* locations = instruction->GetLocations();
4905 Register base = locations->InAt(0).AsRegister<Register>();
4906 Location value = locations->InAt(1);
4907 bool is_volatile = field_info.IsVolatile();
4908 Primitive::Type field_type = field_info.GetFieldType();
4909 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01004910 bool needs_write_barrier =
4911 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004912
4913 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00004914 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004915 }
4916
Mark Mendell81489372015-11-04 11:30:41 -05004917 bool maybe_record_implicit_null_check_done = false;
4918
Calin Juravle52c48962014-12-16 17:02:57 +00004919 switch (field_type) {
4920 case Primitive::kPrimBoolean:
4921 case Primitive::kPrimByte: {
4922 __ movb(Address(base, offset), value.AsRegister<ByteRegister>());
4923 break;
4924 }
4925
4926 case Primitive::kPrimShort:
4927 case Primitive::kPrimChar: {
Mark Mendell81489372015-11-04 11:30:41 -05004928 if (value.IsConstant()) {
4929 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4930 __ movw(Address(base, offset), Immediate(v));
4931 } else {
4932 __ movw(Address(base, offset), value.AsRegister<Register>());
4933 }
Calin Juravle52c48962014-12-16 17:02:57 +00004934 break;
4935 }
4936
4937 case Primitive::kPrimInt:
4938 case Primitive::kPrimNot: {
Roland Levillain4d027112015-07-01 15:41:14 +01004939 if (kPoisonHeapReferences && needs_write_barrier) {
4940 // Note that in the case where `value` is a null reference,
4941 // we do not enter this block, as the reference does not
4942 // need poisoning.
4943 DCHECK_EQ(field_type, Primitive::kPrimNot);
4944 Register temp = locations->GetTemp(0).AsRegister<Register>();
4945 __ movl(temp, value.AsRegister<Register>());
4946 __ PoisonHeapReference(temp);
4947 __ movl(Address(base, offset), temp);
Mark Mendell81489372015-11-04 11:30:41 -05004948 } else if (value.IsConstant()) {
4949 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4950 __ movl(Address(base, offset), Immediate(v));
Roland Levillain4d027112015-07-01 15:41:14 +01004951 } else {
Nicolas Geoffray03971632016-03-17 10:44:24 +00004952 DCHECK(value.IsRegister()) << value;
Roland Levillain4d027112015-07-01 15:41:14 +01004953 __ movl(Address(base, offset), value.AsRegister<Register>());
4954 }
Calin Juravle52c48962014-12-16 17:02:57 +00004955 break;
4956 }
4957
4958 case Primitive::kPrimLong: {
4959 if (is_volatile) {
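        // Atomic 64-bit store: pack the register pair into one XMM register
        // (punpckldq interleaves the two 32-bit halves) and write it with a single movsd.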
4960 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
4961 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
4962 __ movd(temp1, value.AsRegisterPairLow<Register>());
4963 __ movd(temp2, value.AsRegisterPairHigh<Register>());
4964 __ punpckldq(temp1, temp2);
4965 __ movsd(Address(base, offset), temp1);
Calin Juravle77520bc2015-01-12 18:45:46 +00004966 codegen_->MaybeRecordImplicitNullCheck(instruction);
Mark Mendell81489372015-11-04 11:30:41 -05004967 } else if (value.IsConstant()) {
4968 int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
4969 __ movl(Address(base, offset), Immediate(Low32Bits(v)));
4970 codegen_->MaybeRecordImplicitNullCheck(instruction);
4971 __ movl(Address(base, kX86WordSize + offset), Immediate(High32Bits(v)));
Calin Juravle52c48962014-12-16 17:02:57 +00004972 } else {
4973 __ movl(Address(base, offset), value.AsRegisterPairLow<Register>());
Calin Juravle77520bc2015-01-12 18:45:46 +00004974 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004975 __ movl(Address(base, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
4976 }
Mark Mendell81489372015-11-04 11:30:41 -05004977 maybe_record_implicit_null_check_done = true;
Calin Juravle52c48962014-12-16 17:02:57 +00004978 break;
4979 }
4980
4981 case Primitive::kPrimFloat: {
Mark Mendell81489372015-11-04 11:30:41 -05004982 if (value.IsConstant()) {
4983 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4984 __ movl(Address(base, offset), Immediate(v));
4985 } else {
4986 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
4987 }
Calin Juravle52c48962014-12-16 17:02:57 +00004988 break;
4989 }
4990
4991 case Primitive::kPrimDouble: {
Mark Mendell81489372015-11-04 11:30:41 -05004992 if (value.IsConstant()) {
4993 int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
4994 __ movl(Address(base, offset), Immediate(Low32Bits(v)));
4995 codegen_->MaybeRecordImplicitNullCheck(instruction);
4996 __ movl(Address(base, kX86WordSize + offset), Immediate(High32Bits(v)));
4997 maybe_record_implicit_null_check_done = true;
4998 } else {
4999 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
5000 }
Calin Juravle52c48962014-12-16 17:02:57 +00005001 break;
5002 }
5003
5004 case Primitive::kPrimVoid:
5005 LOG(FATAL) << "Unreachable type " << field_type;
5006 UNREACHABLE();
5007 }
5008
Mark Mendell81489372015-11-04 11:30:41 -05005009 if (!maybe_record_implicit_null_check_done) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005010 codegen_->MaybeRecordImplicitNullCheck(instruction);
5011 }
5012
Roland Levillain4d027112015-07-01 15:41:14 +01005013 if (needs_write_barrier) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005014 Register temp = locations->GetTemp(0).AsRegister<Register>();
5015 Register card = locations->GetTemp(1).AsRegister<Register>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005016 codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00005017 }
5018
Calin Juravle52c48962014-12-16 17:02:57 +00005019 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005020 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00005021 }
5022}
5023
5024void LocationsBuilderX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5025 HandleFieldGet(instruction, instruction->GetFieldInfo());
5026}
5027
5028void InstructionCodeGeneratorX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5029 HandleFieldGet(instruction, instruction->GetFieldInfo());
5030}
5031
5032void LocationsBuilderX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
5033 HandleFieldSet(instruction, instruction->GetFieldInfo());
5034}
5035
5036void InstructionCodeGeneratorX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005037 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005038}
5039
5040void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
5041 HandleFieldSet(instruction, instruction->GetFieldInfo());
5042}
5043
5044void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005045 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005046}
5047
5048void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5049 HandleFieldGet(instruction, instruction->GetFieldInfo());
5050}
5051
5052void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5053 HandleFieldGet(instruction, instruction->GetFieldInfo());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005054}
5055
Calin Juravlee460d1d2015-09-29 04:52:17 +01005056void LocationsBuilderX86::VisitUnresolvedInstanceFieldGet(
5057 HUnresolvedInstanceFieldGet* instruction) {
5058 FieldAccessCallingConventionX86 calling_convention;
5059 codegen_->CreateUnresolvedFieldLocationSummary(
5060 instruction, instruction->GetFieldType(), calling_convention);
5061}
5062
5063void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldGet(
5064 HUnresolvedInstanceFieldGet* instruction) {
5065 FieldAccessCallingConventionX86 calling_convention;
5066 codegen_->GenerateUnresolvedFieldAccess(instruction,
5067 instruction->GetFieldType(),
5068 instruction->GetFieldIndex(),
5069 instruction->GetDexPc(),
5070 calling_convention);
5071}
5072
5073void LocationsBuilderX86::VisitUnresolvedInstanceFieldSet(
5074 HUnresolvedInstanceFieldSet* instruction) {
5075 FieldAccessCallingConventionX86 calling_convention;
5076 codegen_->CreateUnresolvedFieldLocationSummary(
5077 instruction, instruction->GetFieldType(), calling_convention);
5078}
5079
5080void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldSet(
5081 HUnresolvedInstanceFieldSet* instruction) {
5082 FieldAccessCallingConventionX86 calling_convention;
5083 codegen_->GenerateUnresolvedFieldAccess(instruction,
5084 instruction->GetFieldType(),
5085 instruction->GetFieldIndex(),
5086 instruction->GetDexPc(),
5087 calling_convention);
5088}
5089
5090void LocationsBuilderX86::VisitUnresolvedStaticFieldGet(
5091 HUnresolvedStaticFieldGet* instruction) {
5092 FieldAccessCallingConventionX86 calling_convention;
5093 codegen_->CreateUnresolvedFieldLocationSummary(
5094 instruction, instruction->GetFieldType(), calling_convention);
5095}
5096
5097void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldGet(
5098 HUnresolvedStaticFieldGet* instruction) {
5099 FieldAccessCallingConventionX86 calling_convention;
5100 codegen_->GenerateUnresolvedFieldAccess(instruction,
5101 instruction->GetFieldType(),
5102 instruction->GetFieldIndex(),
5103 instruction->GetDexPc(),
5104 calling_convention);
5105}
5106
5107void LocationsBuilderX86::VisitUnresolvedStaticFieldSet(
5108 HUnresolvedStaticFieldSet* instruction) {
5109 FieldAccessCallingConventionX86 calling_convention;
5110 codegen_->CreateUnresolvedFieldLocationSummary(
5111 instruction, instruction->GetFieldType(), calling_convention);
5112}
5113
5114void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldSet(
5115 HUnresolvedStaticFieldSet* instruction) {
5116 FieldAccessCallingConventionX86 calling_convention;
5117 codegen_->GenerateUnresolvedFieldAccess(instruction,
5118 instruction->GetFieldType(),
5119 instruction->GetFieldIndex(),
5120 instruction->GetDexPc(),
5121 calling_convention);
5122}
5123
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005124void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005125 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5126 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
5127 ? Location::RequiresRegister()
5128 : Location::Any();
5129 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005130}
5131
Calin Juravle2ae48182016-03-16 14:05:09 +00005132void CodeGeneratorX86::GenerateImplicitNullCheck(HNullCheck* instruction) {
5133 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005134 return;
5135 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005136 LocationSummary* locations = instruction->GetLocations();
5137 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005138
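  // A load from offset 0 of a null object faults; the runtime's fault handler
  // turns the fault into a NullPointerException using the PC recorded below.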
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005139 __ testl(EAX, Address(obj.AsRegister<Register>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00005140 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005141}
5142
Calin Juravle2ae48182016-03-16 14:05:09 +00005143void CodeGeneratorX86::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07005144 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005145 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005146
5147 LocationSummary* locations = instruction->GetLocations();
5148 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005149
5150 if (obj.IsRegister()) {
Mark Mendell42514f62015-03-31 11:34:22 -04005151 __ testl(obj.AsRegister<Register>(), obj.AsRegister<Register>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005152 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005153 __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005154 } else {
5155 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00005156 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005157 __ jmp(slow_path->GetEntryLabel());
5158 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005159 }
5160 __ j(kEqual, slow_path->GetEntryLabel());
5161}
5162
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005163void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005164 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005165}
5166
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005167void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005168 bool object_array_get_with_read_barrier =
5169 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005170 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00005171 new (GetGraph()->GetArena()) LocationSummary(instruction,
5172 object_array_get_with_read_barrier ?
5173 LocationSummary::kCallOnSlowPath :
5174 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005175 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005176 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005177 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005178 locations->SetInAt(0, Location::RequiresRegister());
5179 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005180 if (Primitive::IsFloatingPointType(instruction->GetType())) {
5181 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5182 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005183 // The output overlaps in the case of long: we don't want the low move
5184 // to overwrite the array's location. Likewise, in the case of an
5185 // object array get with read barriers enabled, we do not want the
5186 // move to overwrite the array's location, as we need it to emit
5187 // the read barrier.
5188 locations->SetOut(
5189 Location::RequiresRegister(),
5190 (instruction->GetType() == Primitive::kPrimLong || object_array_get_with_read_barrier) ?
5191 Location::kOutputOverlap :
5192 Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005193 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005194}
5195
5196void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
5197 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005198 Location obj_loc = locations->InAt(0);
5199 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005200 Location index = locations->InAt(1);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005201 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01005202 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005203
Calin Juravle77520bc2015-01-12 18:45:46 +00005204 Primitive::Type type = instruction->GetType();
5205 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005206 case Primitive::kPrimBoolean: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005207 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005208 __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005209 break;
5210 }
5211
5212 case Primitive::kPrimByte: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005213 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005214 __ movsxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005215 break;
5216 }
5217
5218 case Primitive::kPrimShort: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005219 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005220 __ movsxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005221 break;
5222 }
5223
5224 case Primitive::kPrimChar: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005225 Register out = out_loc.AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07005226 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
5227 // Branch cases into compressed and uncompressed for each index's type.
5228 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
5229 NearLabel done, not_compressed;
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005230 __ testl(Address(obj, count_offset), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005231 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005232 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
5233 "Expecting 0=compressed, 1=uncompressed");
5234 __ j(kNotZero, &not_compressed);
jessicahandojo4877b792016-09-08 19:49:13 -07005235 __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
5236 __ jmp(&done);
5237 __ Bind(&not_compressed);
5238 __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5239 __ Bind(&done);
5240 } else {
 5241 // Common case: charAt on an array of char, or the string compression
 5242 // feature is turned off.
5243 __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5244 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005245 break;
5246 }
5247
Roland Levillain7c1559a2015-12-15 10:55:36 +00005248 case Primitive::kPrimInt: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005249 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005250 __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005251 break;
5252 }
5253
Roland Levillain7c1559a2015-12-15 10:55:36 +00005254 case Primitive::kPrimNot: {
5255 static_assert(
5256 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5257 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00005258 // /* HeapReference<Object> */ out =
5259 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
5260 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005261 // Note that a potential implicit null check is handled in this
5262 // CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier call.
5263 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00005264 instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005265 } else {
5266 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005267 __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
5268 codegen_->MaybeRecordImplicitNullCheck(instruction);
5269 // If read barriers are enabled, emit read barriers other than
5270 // Baker's using a slow path (and also unpoison the loaded
5271 // reference, if heap poisoning is enabled).
Roland Levillain7c1559a2015-12-15 10:55:36 +00005272 if (index.IsConstant()) {
5273 uint32_t offset =
5274 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain7c1559a2015-12-15 10:55:36 +00005275 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
5276 } else {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005277 codegen_->MaybeGenerateReadBarrierSlow(
5278 instruction, out_loc, out_loc, obj_loc, data_offset, index);
5279 }
5280 }
5281 break;
5282 }
5283
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005284 case Primitive::kPrimLong: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005285 DCHECK_NE(obj, out_loc.AsRegisterPairLow<Register>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005286 __ movl(out_loc.AsRegisterPairLow<Register>(),
5287 CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset));
5288 codegen_->MaybeRecordImplicitNullCheck(instruction);
5289 __ movl(out_loc.AsRegisterPairHigh<Register>(),
5290 CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset + kX86WordSize));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005291 break;
5292 }
5293
Mark Mendell7c8d0092015-01-26 11:21:33 -05005294 case Primitive::kPrimFloat: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005295 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005296 __ movss(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005297 break;
5298 }
5299
5300 case Primitive::kPrimDouble: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005301 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005302 __ movsd(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005303 break;
5304 }
5305
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005306 case Primitive::kPrimVoid:
Calin Juravle77520bc2015-01-12 18:45:46 +00005307 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005308 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005309 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005310
Roland Levillain7c1559a2015-12-15 10:55:36 +00005311 if (type == Primitive::kPrimNot || type == Primitive::kPrimLong) {
5312 // Potential implicit null checks, in the case of reference or
5313 // long arrays, are handled in the previous switch statement.
5314 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00005315 codegen_->MaybeRecordImplicitNullCheck(instruction);
5316 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005317}
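
// Addressing note: each load above goes through CodeGeneratorX86::ArrayAddress, which is
// assumed to form the usual base + index * scale + displacement operand, roughly:
//   index.IsConstant() ? Address(obj, (constant << scale) + data_offset)
//                      : Address(obj, index_reg, scale, data_offset)
// For compressed strings, the count field holds (length << 1) | flag with the flag in bit 0
// (0 = compressed byte storage, 1 = uncompressed uint16_t storage), which is why the char
// case tests bit 0 before choosing between TIMES_1 and TIMES_2.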
5318
5319void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005320 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005321
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005322 bool needs_write_barrier =
5323 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005324 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005325
Nicolas Geoffray39468442014-09-02 15:17:15 +01005326 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
5327 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01005328 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00005329 LocationSummary::kCallOnSlowPath :
5330 LocationSummary::kNoCall);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005331
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005332 bool is_byte_type = (value_type == Primitive::kPrimBoolean)
5333 || (value_type == Primitive::kPrimByte);
5334 // We need the inputs to be different from the output in the case of a long operation.
5335 // In case of a byte operation, the register allocator does not support multiple
5336 // inputs that die at entry with one in a specific register.
5337 locations->SetInAt(0, Location::RequiresRegister());
5338 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
5339 if (is_byte_type) {
5340 // Ensure the value is in a byte register.
5341 locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, instruction->InputAt(2)));
5342 } else if (Primitive::IsFloatingPointType(value_type)) {
Mark Mendell81489372015-11-04 11:30:41 -05005343 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005344 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005345 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
5346 }
5347 if (needs_write_barrier) {
5348 // Temporary registers for the write barrier.
5349 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
5350 // Ensure the card is in a byte register.
Roland Levillain4f6b0b52015-11-23 19:29:22 +00005351 locations->AddTemp(Location::RegisterLocation(ECX));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005352 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005353}
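
// Register-constraint note: on x86-32 only EAX, EBX, ECX and EDX expose a low-byte form
// (AL..DL), so byte-sized stores need a ByteRegister input, and the card temporary is
// pinned to ECX, presumably so MarkGCCard can write the card with a byte-sized movb.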
5354
5355void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
5356 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005357 Location array_loc = locations->InAt(0);
5358 Register array = array_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005359 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005360 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005361 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005362 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5363 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5364 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005365 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005366 bool needs_write_barrier =
5367 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005368
5369 switch (value_type) {
5370 case Primitive::kPrimBoolean:
5371 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005372 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005373 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_1, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005374 if (value.IsRegister()) {
5375 __ movb(address, value.AsRegister<ByteRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005376 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005377 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005378 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005379 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005380 break;
5381 }
5382
5383 case Primitive::kPrimShort:
5384 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005385 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005386 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_2, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005387 if (value.IsRegister()) {
5388 __ movw(address, value.AsRegister<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005389 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005390 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005391 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005392 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005393 break;
5394 }
5395
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005396 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005397 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005398 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005399
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005400 if (!value.IsRegister()) {
5401 // Just setting null.
5402 DCHECK(instruction->InputAt(2)->IsNullConstant());
5403 DCHECK(value.IsConstant()) << value;
5404 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00005405 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005406 DCHECK(!needs_write_barrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005407 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005408 break;
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005409 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005410
5411 DCHECK(needs_write_barrier);
5412 Register register_value = value.AsRegister<Register>();
Roland Levillain16d9f942016-08-25 17:27:56 +01005413 // We cannot use a NearLabel for `done`, as its range may be too
5414 // short when Baker read barriers are enabled.
5415 Label done;
5416 NearLabel not_null, do_put;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005417 SlowPathCode* slow_path = nullptr;
Roland Levillain16d9f942016-08-25 17:27:56 +01005418 Location temp_loc = locations->GetTemp(0);
5419 Register temp = temp_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005420 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005421 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86(instruction);
5422 codegen_->AddSlowPath(slow_path);
5423 if (instruction->GetValueCanBeNull()) {
5424 __ testl(register_value, register_value);
5425 __ j(kNotEqual, &not_null);
5426 __ movl(address, Immediate(0));
5427 codegen_->MaybeRecordImplicitNullCheck(instruction);
5428 __ jmp(&done);
5429 __ Bind(&not_null);
5430 }
5431
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005432 // Note that when Baker read barriers are enabled, the type
5433 // checks are performed without read barriers. This is fine,
5434 // even in the case where a class object is in the from-space
5435 // after the flip, as a comparison involving such a type would
5436 // not produce a false positive; it may of course produce a
5437 // false negative, in which case we would take the ArraySet
5438 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01005439
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005440 // /* HeapReference<Class> */ temp = array->klass_
5441 __ movl(temp, Address(array, class_offset));
5442 codegen_->MaybeRecordImplicitNullCheck(instruction);
5443 __ MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01005444
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005445 // /* HeapReference<Class> */ temp = temp->component_type_
5446 __ movl(temp, Address(temp, component_offset));
5447 // If heap poisoning is enabled, no need to unpoison `temp`
5448 // nor the object reference in `register_value->klass`, as
5449 // we are comparing two poisoned references.
5450 __ cmpl(temp, Address(register_value, class_offset));
Roland Levillain16d9f942016-08-25 17:27:56 +01005451
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005452 if (instruction->StaticTypeOfArrayIsObjectArray()) {
5453 __ j(kEqual, &do_put);
5454 // If heap poisoning is enabled, the `temp` reference has
5455 // not been unpoisoned yet; unpoison it now.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005456 __ MaybeUnpoisonHeapReference(temp);
5457
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005458 // If heap poisoning is enabled, no need to unpoison the
5459 // heap reference loaded below, as it is only used for a
5460 // comparison with null.
5461 __ cmpl(Address(temp, super_offset), Immediate(0));
5462 __ j(kNotEqual, slow_path->GetEntryLabel());
5463 __ Bind(&do_put);
5464 } else {
5465 __ j(kNotEqual, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005466 }
5467 }
5468
5469 if (kPoisonHeapReferences) {
5470 __ movl(temp, register_value);
5471 __ PoisonHeapReference(temp);
5472 __ movl(address, temp);
5473 } else {
5474 __ movl(address, register_value);
5475 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005476 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005477 codegen_->MaybeRecordImplicitNullCheck(instruction);
5478 }
5479
5480 Register card = locations->GetTemp(1).AsRegister<Register>();
5481 codegen_->MarkGCCard(
5482 temp, card, array, value.AsRegister<Register>(), instruction->GetValueCanBeNull());
5483 __ Bind(&done);
5484
5485 if (slow_path != nullptr) {
5486 __ Bind(slow_path->GetExitLabel());
5487 }
5488
5489 break;
5490 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005491
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005492 case Primitive::kPrimInt: {
5493 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005494 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005495 if (value.IsRegister()) {
5496 __ movl(address, value.AsRegister<Register>());
5497 } else {
5498 DCHECK(value.IsConstant()) << value;
5499 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5500 __ movl(address, Immediate(v));
5501 }
5502 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005503 break;
5504 }
5505
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005506 case Primitive::kPrimLong: {
5507 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005508 if (value.IsRegisterPair()) {
5509 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
5510 value.AsRegisterPairLow<Register>());
5511 codegen_->MaybeRecordImplicitNullCheck(instruction);
5512 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
5513 value.AsRegisterPairHigh<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005514 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005515 DCHECK(value.IsConstant());
5516 int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
5517 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
5518 Immediate(Low32Bits(val)));
5519 codegen_->MaybeRecordImplicitNullCheck(instruction);
5520 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
5521 Immediate(High32Bits(val)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005522 }
5523 break;
5524 }
5525
Mark Mendell7c8d0092015-01-26 11:21:33 -05005526 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005527 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005528 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Mark Mendell81489372015-11-04 11:30:41 -05005529 if (value.IsFpuRegister()) {
5530 __ movss(address, value.AsFpuRegister<XmmRegister>());
5531 } else {
5532 DCHECK(value.IsConstant());
5533 int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
5534 __ movl(address, Immediate(v));
5535 }
5536 codegen_->MaybeRecordImplicitNullCheck(instruction);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005537 break;
5538 }
5539
5540 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005541 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005542 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset);
Mark Mendell81489372015-11-04 11:30:41 -05005543 if (value.IsFpuRegister()) {
5544 __ movsd(address, value.AsFpuRegister<XmmRegister>());
5545 } else {
5546 DCHECK(value.IsConstant());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005547 Address address_hi =
5548 CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset + kX86WordSize);
Mark Mendell81489372015-11-04 11:30:41 -05005549 int64_t v = bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
5550 __ movl(address, Immediate(Low32Bits(v)));
5551 codegen_->MaybeRecordImplicitNullCheck(instruction);
5552 __ movl(address_hi, Immediate(High32Bits(v)));
5553 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05005554 break;
5555 }
5556
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005557 case Primitive::kPrimVoid:
5558 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07005559 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005560 }
5561}
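
// Informal sketch of the reference-store sequence emitted above:
//   1. A null value is stored directly; no type check or write barrier is needed.
//   2. Otherwise array->klass_->component_type_ is compared with value->klass_; both sides
//      may be poisoned, so the comparison itself needs no unpoisoning.
//   3. On a mismatch, an Object[] static type still accepts the store when the component
//      type's super class is null (the component type is then Object); any other mismatch
//      jumps to the ArraySet slow path.
//   4. The reference is stored (poisoned if heap poisoning is enabled) and the GC card is
//      marked.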
5562
5563void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
5564 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005565 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005566 if (!instruction->IsEmittedAtUseSite()) {
5567 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5568 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005569}
5570
5571void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005572 if (instruction->IsEmittedAtUseSite()) {
5573 return;
5574 }
5575
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005576 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005577 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005578 Register obj = locations->InAt(0).AsRegister<Register>();
5579 Register out = locations->Out().AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005580 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005581 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005582 // Shift out the compression flag (bit 0) in case this is a String's length.
5583 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005584 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005585 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005586}
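
// Example: with compression enabled, a 5-character compressed string stores
// count = (5 << 1) | 0 = 10 and an uncompressed one stores (5 << 1) | 1 = 11;
// the shrl by 1 above recovers the logical length 5 in both cases.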
5587
5588void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005589 RegisterSet caller_saves = RegisterSet::Empty();
5590 InvokeRuntimeCallingConvention calling_convention;
5591 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5592 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5593 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005594 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005595 HInstruction* length = instruction->InputAt(1);
5596 if (!length->IsEmittedAtUseSite()) {
5597 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
5598 }
jessicahandojo4877b792016-09-08 19:49:13 -07005599 // Need a temporary register to load the array's length.
5600 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
5601 locations->AddTemp(Location::RequiresRegister());
5602 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005603}
5604
5605void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
jessicahandojo4877b792016-09-08 19:49:13 -07005606 const bool is_string_compressed_char_at =
5607 mirror::kUseStringCompression && instruction->IsStringCharAt();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005608 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005609 Location index_loc = locations->InAt(0);
5610 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07005611 SlowPathCode* slow_path =
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01005612 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005613
Mark Mendell99dbd682015-04-22 16:18:52 -04005614 if (length_loc.IsConstant()) {
5615 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5616 if (index_loc.IsConstant()) {
5617 // BCE will remove the bounds check if we are guaranteed to pass.
5618 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5619 if (index < 0 || index >= length) {
5620 codegen_->AddSlowPath(slow_path);
5621 __ jmp(slow_path->GetEntryLabel());
5622 } else {
5623 // Some optimization after BCE may have generated this, and we should not
5624 // generate a bounds check if it is a valid range.
5625 }
5626 return;
5627 }
5628
5629 // We have to reverse the jump condition because the length is the constant.
5630 Register index_reg = index_loc.AsRegister<Register>();
5631 __ cmpl(index_reg, Immediate(length));
5632 codegen_->AddSlowPath(slow_path);
5633 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005634 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04005635 HInstruction* array_length = instruction->InputAt(1);
5636 if (array_length->IsEmittedAtUseSite()) {
5637 // Address the length field in the array.
5638 DCHECK(array_length->IsArrayLength());
5639 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
5640 Location array_loc = array_length->GetLocations()->InAt(0);
5641 Address array_len(array_loc.AsRegister<Register>(), len_offset);
jessicahandojo4877b792016-09-08 19:49:13 -07005642 if (is_string_compressed_char_at) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005643 // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
5644 // the string compression flag) with the in-memory length and avoid the temporary.
jessicahandojo4877b792016-09-08 19:49:13 -07005645 Register length_reg = locations->GetTemp(0).AsRegister<Register>();
5646 __ movl(length_reg, array_len);
5647 codegen_->MaybeRecordImplicitNullCheck(array_length);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005648 __ shrl(length_reg, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005649 codegen_->GenerateIntCompare(length_reg, index_loc);
Mark Mendellee8d9712016-07-12 11:13:15 -04005650 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07005651 // Checking bounds for the general case:
5652 // an array of char, or a String's backing array when compression is off.
5653 if (index_loc.IsConstant()) {
5654 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5655 __ cmpl(array_len, Immediate(value));
5656 } else {
5657 __ cmpl(array_len, index_loc.AsRegister<Register>());
5658 }
5659 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendellee8d9712016-07-12 11:13:15 -04005660 }
Mark Mendell99dbd682015-04-22 16:18:52 -04005661 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005662 codegen_->GenerateIntCompare(length_loc, index_loc);
Mark Mendell99dbd682015-04-22 16:18:52 -04005663 }
5664 codegen_->AddSlowPath(slow_path);
5665 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005666 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005667}
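
// Note on the branch conditions: the bounds check is a single unsigned comparison. A
// negative index reinterpreted as unsigned is larger than any valid length, so an
// unsigned `index >= length` covers both the negative and the too-large case; kAboveEqual
// is used when the index is the left operand of the cmpl and kBelowEqual when the length
// is, which is why the two paths above use different condition codes.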
5668
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005669void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005670 LOG(FATAL) << "Unreachable";
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01005671}
5672
5673void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005674 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5675}
5676
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005677void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005678 LocationSummary* locations =
5679 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01005680 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005681}
5682
5683void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005684 HBasicBlock* block = instruction->GetBlock();
5685 if (block->GetLoopInformation() != nullptr) {
5686 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5687 // The back edge will generate the suspend check.
5688 return;
5689 }
5690 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5691 // The goto will generate the suspend check.
5692 return;
5693 }
5694 GenerateSuspendCheck(instruction, nullptr);
5695}
5696
5697void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
5698 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005699 SuspendCheckSlowPathX86* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005700 down_cast<SuspendCheckSlowPathX86*>(instruction->GetSlowPath());
5701 if (slow_path == nullptr) {
5702 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86(instruction, successor);
5703 instruction->SetSlowPath(slow_path);
5704 codegen_->AddSlowPath(slow_path);
5705 if (successor != nullptr) {
5706 DCHECK(successor->IsLoopHeader());
5707 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5708 }
5709 } else {
5710 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5711 }
5712
Andreas Gampe542451c2016-07-26 09:02:02 -07005713 __ fs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86PointerSize>().Int32Value()),
Roland Levillain7c1559a2015-12-15 10:55:36 +00005714 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005715 if (successor == nullptr) {
5716 __ j(kNotEqual, slow_path->GetEntryLabel());
5717 __ Bind(slow_path->GetReturnLabel());
5718 } else {
5719 __ j(kEqual, codegen_->GetLabelOf(successor));
5720 __ jmp(slow_path->GetEntryLabel());
5721 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005722}
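
// The check polls the 16-bit thread flags at fs:[Thread::ThreadFlagsOffset]; zero means no
// suspension or checkpoint request. When an explicit successor is given (a loop back edge),
// control has to transfer there anyway, so the fast (flags == 0) case jumps straight to the
// successor and the fall-through enters the slow path.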
5723
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005724X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
5725 return codegen_->GetAssembler();
5726}
5727
Mark Mendell7c8d0092015-01-26 11:21:33 -05005728void ParallelMoveResolverX86::MoveMemoryToMemory32(int dst, int src) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005729 ScratchRegisterScope ensure_scratch(
5730 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
5731 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
5732 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
5733 __ movl(temp_reg, Address(ESP, src + stack_offset));
5734 __ movl(Address(ESP, dst + stack_offset), temp_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005735}
5736
5737void ParallelMoveResolverX86::MoveMemoryToMemory64(int dst, int src) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005738 ScratchRegisterScope ensure_scratch(
5739 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
5740 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
5741 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
5742 __ movl(temp_reg, Address(ESP, src + stack_offset));
5743 __ movl(Address(ESP, dst + stack_offset), temp_reg);
5744 __ movl(temp_reg, Address(ESP, src + stack_offset + kX86WordSize));
5745 __ movl(Address(ESP, dst + stack_offset + kX86WordSize), temp_reg);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005746}
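
// x86 has no memory-to-memory mov, so stack-to-stack moves go through a scratch register;
// the 64-bit variant simply performs two 32-bit copies. When the scratch register itself
// had to be spilled, every ESP-relative offset is biased by kX86WordSize to step over the
// spill slot pushed by the ScratchRegisterScope.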
5747
5748void ParallelMoveResolverX86::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005749 MoveOperands* move = moves_[index];
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005750 Location source = move->GetSource();
5751 Location destination = move->GetDestination();
5752
5753 if (source.IsRegister()) {
5754 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005755 __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
David Brazdil74eb1b22015-12-14 11:44:01 +00005756 } else if (destination.IsFpuRegister()) {
5757 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005758 } else {
5759 DCHECK(destination.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00005760 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005761 }
David Brazdil74eb1b22015-12-14 11:44:01 +00005762 } else if (source.IsRegisterPair()) {
5763 size_t elem_size = Primitive::ComponentSize(Primitive::kPrimInt);
5764 // Create stack space for 2 elements.
5765 __ subl(ESP, Immediate(2 * elem_size));
5766 __ movl(Address(ESP, 0), source.AsRegisterPairLow<Register>());
5767 __ movl(Address(ESP, elem_size), source.AsRegisterPairHigh<Register>());
5768 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
5769 // And remove the temporary stack space we allocated.
5770 __ addl(ESP, Immediate(2 * elem_size));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005771 } else if (source.IsFpuRegister()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00005772 if (destination.IsRegister()) {
5773 __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
5774 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05005775 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
David Brazdil74eb1b22015-12-14 11:44:01 +00005776 } else if (destination.IsRegisterPair()) {
5777 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
5778 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
5779 __ psrlq(src_reg, Immediate(32));
5780 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005781 } else if (destination.IsStackSlot()) {
5782 __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
5783 } else {
5784 DCHECK(destination.IsDoubleStackSlot());
5785 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
5786 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005787 } else if (source.IsStackSlot()) {
5788 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005789 __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005790 } else if (destination.IsFpuRegister()) {
5791 __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005792 } else {
5793 DCHECK(destination.IsStackSlot());
Mark Mendell7c8d0092015-01-26 11:21:33 -05005794 MoveMemoryToMemory32(destination.GetStackIndex(), source.GetStackIndex());
5795 }
5796 } else if (source.IsDoubleStackSlot()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00005797 if (destination.IsRegisterPair()) {
5798 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
5799 __ movl(destination.AsRegisterPairHigh<Register>(),
5800 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
5801 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05005802 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
5803 } else {
5804 DCHECK(destination.IsDoubleStackSlot()) << destination;
5805 MoveMemoryToMemory64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005806 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005807 } else if (source.IsConstant()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05005808 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005809 if (constant->IsIntConstant() || constant->IsNullConstant()) {
Mark Mendell09b84632015-02-13 17:48:38 -05005810 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005811 if (destination.IsRegister()) {
Mark Mendell09b84632015-02-13 17:48:38 -05005812 if (value == 0) {
5813 __ xorl(destination.AsRegister<Register>(), destination.AsRegister<Register>());
5814 } else {
5815 __ movl(destination.AsRegister<Register>(), Immediate(value));
5816 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05005817 } else {
5818 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell09b84632015-02-13 17:48:38 -05005819 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005820 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005821 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005822 float fp_value = constant->AsFloatConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005823 int32_t value = bit_cast<int32_t, float>(fp_value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005824 Immediate imm(value);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005825 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005826 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
5827 if (value == 0) {
5828 // Easy handling of 0.0.
5829 __ xorps(dest, dest);
5830 } else {
5831 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005832 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
5833 Register temp = static_cast<Register>(ensure_scratch.GetRegister());
5834 __ movl(temp, Immediate(value));
5835 __ movd(dest, temp);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005836 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05005837 } else {
5838 DCHECK(destination.IsStackSlot()) << destination;
5839 __ movl(Address(ESP, destination.GetStackIndex()), imm);
5840 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005841 } else if (constant->IsLongConstant()) {
5842 int64_t value = constant->AsLongConstant()->GetValue();
5843 int32_t low_value = Low32Bits(value);
5844 int32_t high_value = High32Bits(value);
5845 Immediate low(low_value);
5846 Immediate high(high_value);
5847 if (destination.IsDoubleStackSlot()) {
5848 __ movl(Address(ESP, destination.GetStackIndex()), low);
5849 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
5850 } else {
5851 __ movl(destination.AsRegisterPairLow<Register>(), low);
5852 __ movl(destination.AsRegisterPairHigh<Register>(), high);
5853 }
5854 } else {
5855 DCHECK(constant->IsDoubleConstant());
5856 double dbl_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005857 int64_t value = bit_cast<int64_t, double>(dbl_value);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005858 int32_t low_value = Low32Bits(value);
5859 int32_t high_value = High32Bits(value);
5860 Immediate low(low_value);
5861 Immediate high(high_value);
5862 if (destination.IsFpuRegister()) {
5863 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
5864 if (value == 0) {
5865 // Easy handling of 0.0.
5866 __ xorpd(dest, dest);
5867 } else {
5868 __ pushl(high);
5869 __ pushl(low);
5870 __ movsd(dest, Address(ESP, 0));
5871 __ addl(ESP, Immediate(8));
5872 }
5873 } else {
5874 DCHECK(destination.IsDoubleStackSlot()) << destination;
5875 __ movl(Address(ESP, destination.GetStackIndex()), low);
5876 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
5877 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005878 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005879 } else {
Nicolas Geoffray42d1f5f2015-01-16 09:14:18 +00005880 LOG(FATAL) << "Unimplemented move: " << destination << " <- " << source;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005881 }
5882}
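
// Constant materialization above: zero is produced with xorl/xorps/xorpd rather than an
// immediate move, a non-zero float is built in a scratch GPR and transferred with movd,
// and a non-zero double is pushed as two 32-bit immediates and reloaded with movsd from
// (ESP), since x86-32 has no 64-bit immediate form.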
5883
Mark Mendella5c19ce2015-04-01 12:51:05 -04005884void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005885 Register suggested_scratch = reg == EAX ? EBX : EAX;
5886 ScratchRegisterScope ensure_scratch(
5887 this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
5888
5889 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
5890 __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
5891 __ movl(Address(ESP, mem + stack_offset), reg);
5892 __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005893}
5894
Mark Mendell7c8d0092015-01-26 11:21:33 -05005895void ParallelMoveResolverX86::Exchange32(XmmRegister reg, int mem) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005896 ScratchRegisterScope ensure_scratch(
5897 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
5898
5899 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
5900 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
5901 __ movl(temp_reg, Address(ESP, mem + stack_offset));
5902 __ movss(Address(ESP, mem + stack_offset), reg);
5903 __ movd(reg, temp_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005904}
5905
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005906void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005907 ScratchRegisterScope ensure_scratch1(
5908 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01005909
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005910 Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
5911 ScratchRegisterScope ensure_scratch2(
5912 this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01005913
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005914 int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
5915 stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
5916 __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
5917 __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
5918 __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
5919 __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005920}
5921
5922void ParallelMoveResolverX86::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005923 MoveOperands* move = moves_[index];
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005924 Location source = move->GetSource();
5925 Location destination = move->GetDestination();
5926
5927 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell90979812015-07-28 16:41:21 -04005928 // Use XOR swap algorithm to avoid serializing XCHG instruction or using a temporary.
5929 DCHECK_NE(destination.AsRegister<Register>(), source.AsRegister<Register>());
5930 __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
5931 __ xorl(source.AsRegister<Register>(), destination.AsRegister<Register>());
5932 __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005933 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005934 Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005935 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005936 Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005937 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
5938 Exchange(destination.GetStackIndex(), source.GetStackIndex());
Mark Mendell7c8d0092015-01-26 11:21:33 -05005939 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
5940 // Use the XOR swap algorithm to avoid a temporary.
5941 DCHECK_NE(source.reg(), destination.reg());
5942 __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
5943 __ xorpd(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5944 __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
5945 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
5946 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
5947 } else if (destination.IsFpuRegister() && source.IsStackSlot()) {
5948 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005949 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
5950 // Take advantage of the 16 bytes in the XMM register.
5951 XmmRegister reg = source.AsFpuRegister<XmmRegister>();
5952 Address stack(ESP, destination.GetStackIndex());
5953 // Load the double into the high 64 bits.
5954 __ movhpd(reg, stack);
5955
5956 // Store the low double into the destination.
5957 __ movsd(stack, reg);
5958
5959 // Move the high double to the low double.
5960 __ psrldq(reg, Immediate(8));
5961 } else if (destination.IsFpuRegister() && source.IsDoubleStackSlot()) {
5962 // Take advantage of the 16 bytes in the XMM register.
5963 XmmRegister reg = destination.AsFpuRegister<XmmRegister>();
5964 Address stack(ESP, source.GetStackIndex());
5965 // Load the double into the high 64 bits.
5966 __ movhpd(reg, stack);
5967
5968 // Store the low double into the destination.
5969 __ movsd(stack, reg);
5970
5971 // Move the high double to the low double.
5972 __ psrldq(reg, Immediate(8));
5973 } else if (destination.IsDoubleStackSlot() && source.IsDoubleStackSlot()) {
5974 Exchange(destination.GetStackIndex(), source.GetStackIndex());
5975 Exchange(destination.GetHighStackIndex(kX86WordSize), source.GetHighStackIndex(kX86WordSize));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005976 } else {
Mark Mendell7c8d0092015-01-26 11:21:33 -05005977 LOG(FATAL) << "Unimplemented: source: " << source << ", destination: " << destination;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005978 }
5979}
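
// The register-register and xmm-xmm swaps use the classic XOR trick
//   a ^= b; b ^= a; a ^= b;
// which needs no temporary but would zero both operands if they aliased, hence the
// DCHECK_NE guards above. The xmm/double-stack-slot swap instead uses the XMM register
// itself as the temporary (movhpd + movsd + psrldq), avoiding a scratch register.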
5980
5981void ParallelMoveResolverX86::SpillScratch(int reg) {
5982 __ pushl(static_cast<Register>(reg));
5983}
5984
5985void ParallelMoveResolverX86::RestoreScratch(int reg) {
5986 __ popl(static_cast<Register>(reg));
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01005987}
5988
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005989HLoadClass::LoadKind CodeGeneratorX86::GetSupportedLoadClassKind(
5990 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005991 switch (desired_class_load_kind) {
5992 case HLoadClass::LoadKind::kReferrersClass:
5993 break;
5994 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
5995 DCHECK(!GetCompilerOptions().GetCompilePic());
5996 break;
5997 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
5998 DCHECK(GetCompilerOptions().GetCompilePic());
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005999 FALLTHROUGH_INTENDED;
6000 case HLoadClass::LoadKind::kBssEntry:
6001 DCHECK(!Runtime::Current()->UseJitCompilation()); // Note: boot image is also non-JIT.
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006002 // We disable pc-relative load when there is an irreducible loop, as the optimization
6003 // is incompatible with it.
6004 // TODO: Create as many X86ComputeBaseMethodAddress instructions as needed for methods
6005 // with irreducible loops.
6006 if (GetGraph()->HasIrreducibleLoops()) {
6007 return HLoadClass::LoadKind::kDexCacheViaMethod;
6008 }
6009 break;
6010 case HLoadClass::LoadKind::kBootImageAddress:
6011 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006012 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006013 DCHECK(Runtime::Current()->UseJitCompilation());
6014 break;
6015 case HLoadClass::LoadKind::kDexCacheViaMethod:
6016 break;
6017 }
6018 return desired_class_load_kind;
6019}
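
// Fallback note: kDexCacheViaMethod is the conservative kind that the PC-relative kinds
// degrade to when the graph has irreducible loops; VisitLoadClass below handles it with
// GenerateLoadClassRuntimeCall, i.e. a runtime call instead of an inline load.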
6020
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006021void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00006022 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
6023 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006024 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00006025 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006026 cls,
6027 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00006028 Location::RegisterLocation(EAX));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006029 return;
6030 }
Vladimir Marko41559982017-01-06 14:04:23 +00006031 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006032
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006033 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
6034 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006035 ? LocationSummary::kCallOnSlowPath
6036 : LocationSummary::kNoCall;
6037 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006038 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006039 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006040 }
6041
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006042 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006043 load_kind == HLoadClass::LoadKind::kBootImageLinkTimePcRelative ||
6044 load_kind == HLoadClass::LoadKind::kBssEntry) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006045 locations->SetInAt(0, Location::RequiresRegister());
6046 }
6047 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006048}
6049
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006050Label* CodeGeneratorX86::NewJitRootClassPatch(const DexFile& dex_file,
6051 dex::TypeIndex dex_index,
6052 uint64_t address) {
6053 jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index), address);
6054 // Add a patch entry and return the label.
6055 jit_class_patches_.emplace_back(dex_file, dex_index.index_);
6056 PatchInfo<Label>* info = &jit_class_patches_.back();
6057 return &info->label;
6058}
6059
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006060void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00006061 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
6062 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
6063 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01006064 return;
6065 }
Vladimir Marko41559982017-01-06 14:04:23 +00006066 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01006067
Vladimir Marko41559982017-01-06 14:04:23 +00006068 LocationSummary* locations = cls->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006069 Location out_loc = locations->Out();
6070 Register out = out_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006071
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006072 bool generate_null_check = false;
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006073 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
6074 ? kWithoutReadBarrier
6075 : kCompilerReadBarrierOption;
Vladimir Marko41559982017-01-06 14:04:23 +00006076 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006077 case HLoadClass::LoadKind::kReferrersClass: {
6078 DCHECK(!cls->CanCallRuntime());
6079 DCHECK(!cls->MustGenerateClinitCheck());
6080 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
6081 Register current_method = locations->InAt(0).AsRegister<Register>();
6082 GenerateGcRootFieldLoad(
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006083 cls,
6084 out_loc,
6085 Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
Roland Levillain00468f32016-10-27 18:02:48 +01006086 /* fixup_label */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006087 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006088 break;
6089 }
6090 case HLoadClass::LoadKind::kBootImageLinkTimeAddress: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006091 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006092 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006093 __ movl(out, Immediate(/* placeholder */ 0));
6094 codegen_->RecordTypePatch(cls);
6095 break;
6096 }
6097 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006098 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006099 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006100 Register method_address = locations->InAt(0).AsRegister<Register>();
6101 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
6102 codegen_->RecordTypePatch(cls);
6103 break;
6104 }
6105 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006106 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006107 DCHECK_NE(cls->GetAddress(), 0u);
6108 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
6109 __ movl(out, Immediate(address));
6110 codegen_->RecordSimplePatch();
6111 break;
6112 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006113 case HLoadClass::LoadKind::kBssEntry: {
6114 Register method_address = locations->InAt(0).AsRegister<Register>();
6115 Address address(method_address, CodeGeneratorX86::kDummy32BitOffset);
6116 Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
6117 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
6118 generate_null_check = true;
6119 break;
6120 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006121 case HLoadClass::LoadKind::kJitTableAddress: {
6122 Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
6123 Label* fixup_label = codegen_->NewJitRootClassPatch(
6124 cls->GetDexFile(), cls->GetTypeIndex(), cls->GetAddress());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006125 // /* GcRoot<mirror::Class> */ out = *address
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006126 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, kCompilerReadBarrierOption);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006127 break;
6128 }
Vladimir Marko41559982017-01-06 14:04:23 +00006129 case HLoadClass::LoadKind::kDexCacheViaMethod:
6130 LOG(FATAL) << "UNREACHABLE";
6131 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006132 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006133
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006134 if (generate_null_check || cls->MustGenerateClinitCheck()) {
6135 DCHECK(cls->CanCallRuntime());
6136 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
6137 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
6138 codegen_->AddSlowPath(slow_path);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00006139
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006140 if (generate_null_check) {
6141 __ testl(out, out);
6142 __ j(kEqual, slow_path->GetEntryLabel());
6143 }
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00006144
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006145 if (cls->MustGenerateClinitCheck()) {
6146 GenerateClassInitializationCheck(slow_path, out);
6147 } else {
6148 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006149 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006150 }
6151}
6152
6153void LocationsBuilderX86::VisitClinitCheck(HClinitCheck* check) {
6154 LocationSummary* locations =
6155 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
6156 locations->SetInAt(0, Location::RequiresRegister());
6157 if (check->HasUses()) {
6158 locations->SetOut(Location::SameAsFirstInput());
6159 }
6160}
6161
6162void InstructionCodeGeneratorX86::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006163 // We assume the class to not be null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07006164 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006165 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006166 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00006167 GenerateClassInitializationCheck(slow_path,
6168 check->GetLocations()->InAt(0).AsRegister<Register>());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006169}
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006170
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006171void InstructionCodeGeneratorX86::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07006172 SlowPathCode* slow_path, Register class_reg) {
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006173 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
6174 Immediate(mirror::Class::kStatusInitialized));
6175 __ j(kLess, slow_path->GetEntryLabel());
6176 __ Bind(slow_path->GetExitLabel());
6177 // No need for memory fence, thanks to the X86 memory model.
6178}
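
// On x86 the strongly ordered memory model already gives loads acquire semantics (a load
// is not reordered with later loads or stores), so reading the class status needs no
// explicit fence. The kLess comparison also sends any status below kStatusInitialized,
// including the error statuses, to the slow path.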
6179
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006180HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind(
6181 HLoadString::LoadKind desired_string_load_kind) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006182 switch (desired_string_load_kind) {
6183 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
6184 DCHECK(!GetCompilerOptions().GetCompilePic());
6185 break;
6186 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
6187 DCHECK(GetCompilerOptions().GetCompilePic());
6188 FALLTHROUGH_INTENDED;
Vladimir Markoaad75c62016-10-03 08:46:48 +00006189 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01006190 DCHECK(!Runtime::Current()->UseJitCompilation()); // Note: boot image is also non-JIT.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006191 // We disable pc-relative load when there is an irreducible loop, as the optimization
6192 // is incompatible with it.
6193 // TODO: Create as many X86ComputeBaseMethodAddress instructions as needed for methods
6194 // with irreducible loops.
6195 if (GetGraph()->HasIrreducibleLoops()) {
6196 return HLoadString::LoadKind::kDexCacheViaMethod;
6197 }
6198 break;
6199 case HLoadString::LoadKind::kBootImageAddress:
6200 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006201 case HLoadString::LoadKind::kJitTableAddress:
6202 DCHECK(Runtime::Current()->UseJitCompilation());
6203 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006204 case HLoadString::LoadKind::kDexCacheViaMethod:
6205 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006206 }
6207 return desired_string_load_kind;
6208}
6209
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006210void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006211 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00006212 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006213 HLoadString::LoadKind load_kind = load->GetLoadKind();
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006214 if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
Vladimir Markoaad75c62016-10-03 08:46:48 +00006215 load_kind == HLoadString::LoadKind::kBssEntry) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006216 locations->SetInAt(0, Location::RequiresRegister());
6217 }
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006218 if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
6219 locations->SetOut(Location::RegisterLocation(EAX));
6220 } else {
6221 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006222 if (load_kind == HLoadString::LoadKind::kBssEntry) {
6223 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6224 // Rely on the pResolveString and/or marking to save everything.
6225 RegisterSet caller_saves = RegisterSet::Empty();
6226 InvokeRuntimeCallingConvention calling_convention;
6227 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6228 locations->SetCustomSlowPathCallerSaves(caller_saves);
6229 } else {
6230 // For non-Baker read barrier we have a temp-clobbering call.
6231 }
6232 }
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006233 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006234}
6235
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006236Label* CodeGeneratorX86::NewJitRootStringPatch(const DexFile& dex_file,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006237 dex::StringIndex dex_index,
6238 Handle<mirror::String> handle) {
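  // Record the string root so that it can be emitted in the JIT root table when the
  // compiled code is committed, keeping the string alive for the GC.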
6239 jit_string_roots_.Overwrite(
6240 StringReference(&dex_file, dex_index), reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006241 // Add a patch entry and return the label.
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006242 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006243 PatchInfo<Label>* info = &jit_string_patches_.back();
6244 return &info->label;
6245}
6246
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006247// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6248// move.
6249void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01006250 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006251 Location out_loc = locations->Out();
6252 Register out = out_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006253
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006254 switch (load->GetLoadKind()) {
6255 case HLoadString::LoadKind::kBootImageLinkTimeAddress: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006256 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006257 __ movl(out, Immediate(/* placeholder */ 0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00006258 codegen_->RecordBootStringPatch(load);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006259 return; // No dex cache slow path.
6260 }
6261 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006262 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006263 Register method_address = locations->InAt(0).AsRegister<Register>();
6264 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Markoaad75c62016-10-03 08:46:48 +00006265 codegen_->RecordBootStringPatch(load);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006266 return; // No dex cache slow path.
6267 }
6268 case HLoadString::LoadKind::kBootImageAddress: {
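      // The string lives in the boot image, so its 32-bit address is known at compile time
      // and is embedded directly; RecordSimplePatch() only records the location in case
      // patch information is requested for image relocation.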
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006269 uint32_t address = dchecked_integral_cast<uint32_t>(
6270 reinterpret_cast<uintptr_t>(load->GetString().Get()));
6271 DCHECK_NE(address, 0u);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006272 __ movl(out, Immediate(address));
6273 codegen_->RecordSimplePatch();
6274 return; // No dex cache slow path.
6275 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00006276 case HLoadString::LoadKind::kBssEntry: {
6277 Register method_address = locations->InAt(0).AsRegister<Register>();
6278 Address address = Address(method_address, CodeGeneratorX86::kDummy32BitOffset);
6279 Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006280 // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006281 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
Vladimir Markoaad75c62016-10-03 08:46:48 +00006282 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load);
6283 codegen_->AddSlowPath(slow_path);
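      // A null value in the .bss slot means the string has not been resolved yet; the slow
      // path calls pResolveString and stores the result back into the slot.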
6284 __ testl(out, out);
6285 __ j(kEqual, slow_path->GetEntryLabel());
6286 __ Bind(slow_path->GetExitLabel());
6287 return;
6288 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006289 case HLoadString::LoadKind::kJitTableAddress: {
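      // The absolute address is a placeholder; it is fixed up when the JIT commits the code
      // to point at the root table entry holding this string.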
6290 Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
6291 Label* fixup_label = codegen_->NewJitRootStringPatch(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006292 load->GetDexFile(), load->GetStringIndex(), load->GetString());
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006293 // /* GcRoot<mirror::String> */ out = *address
6294 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
6295 return;
6296 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006297 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006298 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006299 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006300
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006301 // TODO: Re-add the compiler code to do the string dex cache lookup.
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006302 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006303 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006304 __ movl(calling_convention.GetRegisterAt(0), Immediate(load->GetStringIndex().index_));
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006305 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6306 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006307}
6308
David Brazdilcb1c0552015-08-04 16:22:25 +01006309static Address GetExceptionTlsAddress() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006310 return Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01006311}
6312
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006313void LocationsBuilderX86::VisitLoadException(HLoadException* load) {
6314 LocationSummary* locations =
6315 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
6316 locations->SetOut(Location::RequiresRegister());
6317}
6318
6319void InstructionCodeGeneratorX86::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01006320 __ fs()->movl(load->GetLocations()->Out().AsRegister<Register>(), GetExceptionTlsAddress());
6321}
6322
6323void LocationsBuilderX86::VisitClearException(HClearException* clear) {
6324 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
6325}
6326
6327void InstructionCodeGeneratorX86::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
6328 __ fs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006329}
6330
6331void LocationsBuilderX86::VisitThrow(HThrow* instruction) {
6332 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006333 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006334 InvokeRuntimeCallingConvention calling_convention;
6335 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6336}
6337
6338void InstructionCodeGeneratorX86::VisitThrow(HThrow* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006339 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006340 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006341}
6342
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006343// Temp is used for read barrier.
6344static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6345 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006346 !kUseBakerReadBarrier &&
6347 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain7c1559a2015-12-15 10:55:36 +00006348 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006349 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6350 return 1;
6351 }
6352 return 0;
6353}
6354
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006355// Interface case has 3 temps, one for holding the number of interfaces, one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006356// interface pointer, one for loading the current interface.
6357// The other checks have one temp for loading the object's class.
6358static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
6359 if (type_check_kind == TypeCheckKind::kInterfaceCheck && !kPoisonHeapReferences) {
6360 return 2;
6361 }
6362 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain7c1559a2015-12-15 10:55:36 +00006363}
6364
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006365void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006366 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006367 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01006368 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006369 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006370 case TypeCheckKind::kExactCheck:
6371 case TypeCheckKind::kAbstractClassCheck:
6372 case TypeCheckKind::kClassHierarchyCheck:
6373 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006374 call_kind =
6375 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01006376 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006377 break;
6378 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006379 case TypeCheckKind::kUnresolvedCheck:
6380 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006381 call_kind = LocationSummary::kCallOnSlowPath;
6382 break;
6383 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006384
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006385 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01006386 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006387 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006388 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006389 locations->SetInAt(0, Location::RequiresRegister());
6390 locations->SetInAt(1, Location::Any());
6391 // Note that TypeCheckSlowPathX86 uses this "out" register too.
6392 locations->SetOut(Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006393 // When read barriers are enabled, we need a temporary register for some cases.
6394 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006395}
6396
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006397void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00006398 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006399 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006400 Location obj_loc = locations->InAt(0);
6401 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006402 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006403 Location out_loc = locations->Out();
6404 Register out = out_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006405 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
6406 DCHECK_LE(num_temps, 1u);
6407 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006408 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006409 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6410 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6411 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07006412 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006413 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006414
6415 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006416 // Avoid null check if we know obj is not null.
6417 if (instruction->MustDoNullCheck()) {
6418 __ testl(obj, obj);
6419 __ j(kEqual, &zero);
6420 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006421
Roland Levillain7c1559a2015-12-15 10:55:36 +00006422 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006423 case TypeCheckKind::kExactCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006424 // /* HeapReference<Class> */ out = obj->klass_
6425 GenerateReferenceLoadTwoRegisters(instruction,
6426 out_loc,
6427 obj_loc,
6428 class_offset,
6429 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006430 if (cls.IsRegister()) {
6431 __ cmpl(out, cls.AsRegister<Register>());
6432 } else {
6433 DCHECK(cls.IsStackSlot()) << cls;
6434 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6435 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006436
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006437 // Classes must be equal for the instanceof to succeed.
6438 __ j(kNotEqual, &zero);
6439 __ movl(out, Immediate(1));
6440 __ jmp(&done);
6441 break;
6442 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006443
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006444 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006445 // /* HeapReference<Class> */ out = obj->klass_
6446 GenerateReferenceLoadTwoRegisters(instruction,
6447 out_loc,
6448 obj_loc,
6449 class_offset,
6450 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006451 // If the class is abstract, we eagerly fetch the super class of the
6452 // object to avoid doing a comparison we know will fail.
6453 NearLabel loop;
6454 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006455 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006456 GenerateReferenceLoadOneRegister(instruction,
6457 out_loc,
6458 super_offset,
6459 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006460 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006461 __ testl(out, out);
6462 // If `out` is null, we use it for the result, and jump to `done`.
6463 __ j(kEqual, &done);
6464 if (cls.IsRegister()) {
6465 __ cmpl(out, cls.AsRegister<Register>());
6466 } else {
6467 DCHECK(cls.IsStackSlot()) << cls;
6468 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6469 }
6470 __ j(kNotEqual, &loop);
6471 __ movl(out, Immediate(1));
6472 if (zero.IsLinked()) {
6473 __ jmp(&done);
6474 }
6475 break;
6476 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006477
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006478 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006479 // /* HeapReference<Class> */ out = obj->klass_
6480 GenerateReferenceLoadTwoRegisters(instruction,
6481 out_loc,
6482 obj_loc,
6483 class_offset,
6484 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006485 // Walk over the class hierarchy to find a match.
6486 NearLabel loop, success;
6487 __ Bind(&loop);
6488 if (cls.IsRegister()) {
6489 __ cmpl(out, cls.AsRegister<Register>());
6490 } else {
6491 DCHECK(cls.IsStackSlot()) << cls;
6492 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6493 }
6494 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006495 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006496 GenerateReferenceLoadOneRegister(instruction,
6497 out_loc,
6498 super_offset,
6499 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006500 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006501 __ testl(out, out);
6502 __ j(kNotEqual, &loop);
6503 // If `out` is null, we use it for the result, and jump to `done`.
6504 __ jmp(&done);
6505 __ Bind(&success);
6506 __ movl(out, Immediate(1));
6507 if (zero.IsLinked()) {
6508 __ jmp(&done);
6509 }
6510 break;
6511 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006512
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006513 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006514 // /* HeapReference<Class> */ out = obj->klass_
6515 GenerateReferenceLoadTwoRegisters(instruction,
6516 out_loc,
6517 obj_loc,
6518 class_offset,
6519 kCompilerReadBarrierOption);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006520 // Do an exact check.
6521 NearLabel exact_check;
6522 if (cls.IsRegister()) {
6523 __ cmpl(out, cls.AsRegister<Register>());
6524 } else {
6525 DCHECK(cls.IsStackSlot()) << cls;
6526 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6527 }
6528 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006529 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006530 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006531 GenerateReferenceLoadOneRegister(instruction,
6532 out_loc,
6533 component_offset,
6534 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006535 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006536 __ testl(out, out);
6537 // If `out` is null, we use it for the result, and jump to `done`.
6538 __ j(kEqual, &done);
6539 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
6540 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006541 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006542 __ movl(out, Immediate(1));
6543 __ jmp(&done);
6544 break;
6545 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006546
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006547 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006548 // No read barrier since the slow path will retry upon failure.
6549 // /* HeapReference<Class> */ out = obj->klass_
6550 GenerateReferenceLoadTwoRegisters(instruction,
6551 out_loc,
6552 obj_loc,
6553 class_offset,
6554 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006555 if (cls.IsRegister()) {
6556 __ cmpl(out, cls.AsRegister<Register>());
6557 } else {
6558 DCHECK(cls.IsStackSlot()) << cls;
6559 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6560 }
6561 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00006562 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(instruction,
6563 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006564 codegen_->AddSlowPath(slow_path);
6565 __ j(kNotEqual, slow_path->GetEntryLabel());
6566 __ movl(out, Immediate(1));
6567 if (zero.IsLinked()) {
6568 __ jmp(&done);
6569 }
6570 break;
6571 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006572
Calin Juravle98893e12015-10-02 21:05:03 +01006573 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006574 case TypeCheckKind::kInterfaceCheck: {
6575 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006576 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00006577 // cases.
6578 //
6579 // We cannot directly call the InstanceofNonTrivial runtime
6580 // entry point without resorting to a type checking slow path
6581 // here (i.e. by calling InvokeRuntime directly), as it would
6582 // require to assign fixed registers for the inputs of this
6583 // HInstanceOf instruction (following the runtime calling
6584 // convention), which might be cluttered by the potential first
6585 // read barrier emission at the beginning of this method.
Roland Levillain7c1559a2015-12-15 10:55:36 +00006586 //
6587 // TODO: Introduce a new runtime entry point taking the object
6588 // to test (instead of its class) as argument, and let it deal
6589 // with the read barrier issues. This will let us refactor this
6590 // case of the `switch` code as it was previously (with a direct
6591 // call to the runtime not using a type checking slow path).
6592 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006593 DCHECK(locations->OnlyCallsOnSlowPath());
6594 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(instruction,
6595 /* is_fatal */ false);
6596 codegen_->AddSlowPath(slow_path);
6597 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006598 if (zero.IsLinked()) {
6599 __ jmp(&done);
6600 }
6601 break;
6602 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006603 }
6604
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006605 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006606 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006607 __ xorl(out, out);
6608 }
6609
6610 if (done.IsLinked()) {
6611 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006612 }
6613
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006614 if (slow_path != nullptr) {
6615 __ Bind(slow_path->GetExitLabel());
6616 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006617}
6618
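// A "fatal" type check slow path only throws and never returns to the compiled code, so it
// does not need to save live registers and the check can use LocationSummary::kNoCall.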
Mathieu Chartier5ac321b2016-11-09 16:33:54 -08006619static bool IsTypeCheckSlowPathFatal(TypeCheckKind type_check_kind, bool throws_into_catch) {
6620 switch (type_check_kind) {
6621 case TypeCheckKind::kExactCheck:
6622 case TypeCheckKind::kAbstractClassCheck:
6623 case TypeCheckKind::kClassHierarchyCheck:
6624 case TypeCheckKind::kArrayObjectCheck:
6625 return !throws_into_catch && !kEmitCompilerReadBarrier;
6626 case TypeCheckKind::kInterfaceCheck:
6627 return !throws_into_catch && !kEmitCompilerReadBarrier && !kPoisonHeapReferences;
6628 case TypeCheckKind::kArrayCheck:
6629 case TypeCheckKind::kUnresolvedCheck:
6630 return false;
6631 }
6632 LOG(FATAL) << "Unreachable";
6633 UNREACHABLE();
6634}
6635
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006636void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006637 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006638 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Mathieu Chartier5ac321b2016-11-09 16:33:54 -08006639 LocationSummary::CallKind call_kind =
6640 IsTypeCheckSlowPathFatal(type_check_kind, throws_into_catch)
6641 ? LocationSummary::kNoCall
6642 : LocationSummary::kCallOnSlowPath;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006643 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
6644 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006645 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6646 // Require a register for the interface check since there is a loop that compares the class to
6647 // a memory address.
6648 locations->SetInAt(1, Location::RequiresRegister());
6649 } else {
6650 locations->SetInAt(1, Location::Any());
6651 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006652 // Note that TypeCheckSlowPathX86 uses this "temp" register too.
6653 locations->AddTemp(Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006654 // When read barriers are enabled, we need an additional temporary register for some cases.
6655 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
6656}
6657
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006658void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00006659 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006660 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006661 Location obj_loc = locations->InAt(0);
6662 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006663 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006664 Location temp_loc = locations->GetTemp(0);
6665 Register temp = temp_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006666 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
6667 DCHECK_GE(num_temps, 1u);
6668 DCHECK_LE(num_temps, 2u);
6669 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
6670 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6671 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6672 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6673 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
6674 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
6675 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
6676 const uint32_t object_array_data_offset =
6677 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006678
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006679 // The slow path is never fatal when read barriers are enabled: the checks below avoid
6680 // read barriers for performance and code size, which can produce false negatives that
6681 // must fall back to the runtime (non-fatal) rather than throw.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006682 bool is_type_check_slow_path_fatal =
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006683 IsTypeCheckSlowPathFatal(type_check_kind, instruction->CanThrowIntoCatchBlock());
6684
Roland Levillain0d5a2812015-11-13 10:07:31 +00006685 SlowPathCode* type_check_slow_path =
6686 new (GetGraph()->GetArena()) TypeCheckSlowPathX86(instruction,
6687 is_type_check_slow_path_fatal);
6688 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006689
Roland Levillain0d5a2812015-11-13 10:07:31 +00006690 NearLabel done;
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006691 // Avoid null check if we know obj is not null.
6692 if (instruction->MustDoNullCheck()) {
6693 __ testl(obj, obj);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006694 __ j(kEqual, &done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006695 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006696
Roland Levillain0d5a2812015-11-13 10:07:31 +00006697 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006698 case TypeCheckKind::kExactCheck:
6699 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006700 // /* HeapReference<Class> */ temp = obj->klass_
6701 GenerateReferenceLoadTwoRegisters(instruction,
6702 temp_loc,
6703 obj_loc,
6704 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006705 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006706
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006707 if (cls.IsRegister()) {
6708 __ cmpl(temp, cls.AsRegister<Register>());
6709 } else {
6710 DCHECK(cls.IsStackSlot()) << cls;
6711 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
6712 }
6713 // Jump to slow path for throwing the exception or doing a
6714 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006715 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006716 break;
6717 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006718
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006719 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006720 // /* HeapReference<Class> */ temp = obj->klass_
6721 GenerateReferenceLoadTwoRegisters(instruction,
6722 temp_loc,
6723 obj_loc,
6724 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006725 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006726
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006727 // If the class is abstract, we eagerly fetch the super class of the
6728 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006729 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006730 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006731 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006732 GenerateReferenceLoadOneRegister(instruction,
6733 temp_loc,
6734 super_offset,
6735 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006736 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006737
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006738 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6739 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006740 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006741 __ j(kZero, type_check_slow_path->GetEntryLabel());
Roland Levillain0d5a2812015-11-13 10:07:31 +00006742
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006743 // Otherwise, compare the classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006744 if (cls.IsRegister()) {
6745 __ cmpl(temp, cls.AsRegister<Register>());
6746 } else {
6747 DCHECK(cls.IsStackSlot()) << cls;
6748 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
6749 }
6750 __ j(kNotEqual, &loop);
6751 break;
6752 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006753
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006754 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006755 // /* HeapReference<Class> */ temp = obj->klass_
6756 GenerateReferenceLoadTwoRegisters(instruction,
6757 temp_loc,
6758 obj_loc,
6759 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006760 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006761
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006762 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006763 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006764 __ Bind(&loop);
6765 if (cls.IsRegister()) {
6766 __ cmpl(temp, cls.AsRegister<Register>());
6767 } else {
6768 DCHECK(cls.IsStackSlot()) << cls;
6769 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
6770 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006771 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006772
Roland Levillain0d5a2812015-11-13 10:07:31 +00006773 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006774 GenerateReferenceLoadOneRegister(instruction,
6775 temp_loc,
6776 super_offset,
6777 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006778 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006779
6780 // If the class reference currently in `temp` is not null, jump
6781 // back at the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006782 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006783 __ j(kNotZero, &loop);
6784 // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006785 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006786 break;
6787 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006788
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006789 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006790 // /* HeapReference<Class> */ temp = obj->klass_
6791 GenerateReferenceLoadTwoRegisters(instruction,
6792 temp_loc,
6793 obj_loc,
6794 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006795 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006796
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006797 // Do an exact check.
6798 if (cls.IsRegister()) {
6799 __ cmpl(temp, cls.AsRegister<Register>());
6800 } else {
6801 DCHECK(cls.IsStackSlot()) << cls;
6802 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
6803 }
6804 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006805
6806 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006807 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006808 GenerateReferenceLoadOneRegister(instruction,
6809 temp_loc,
6810 component_offset,
6811 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006812 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006813
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006814 // If the component type is null (i.e. the object is not an array), jump to the slow path to
6815 // throw the exception. Otherwise proceed with the check.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006816 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006817 __ j(kZero, type_check_slow_path->GetEntryLabel());
Roland Levillain0d5a2812015-11-13 10:07:31 +00006818
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006819 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006820 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006821 break;
6822 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006823
Calin Juravle98893e12015-10-02 21:05:03 +01006824 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006825 // We always go into the type check slow path for the unresolved check case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006826 // We cannot directly call the CheckCast runtime entry point
6827 // without resorting to a type checking slow path here (i.e. by
6828 // calling InvokeRuntime directly), as it would require to
6829 // assign fixed registers for the inputs of this HInstanceOf
6830 // instruction (following the runtime calling convention), which
6831 // might be cluttered by the potential first read barrier
6832 // emission at the beginning of this method.
6833 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006834 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006835
6836 case TypeCheckKind::kInterfaceCheck: {
6837 // Fast path for the interface check. Since we compare with a memory location in the inner
6838 // loop we would need to have cls poisoned. However unpoisoning cls would reset the
6839 // conditional flags and cause the conditional jump to be incorrect. Therefore we just jump
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006840 // to the slow path if we are running under poisoning.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006841 if (!kPoisonHeapReferences) {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006842 // Try to avoid read barriers to improve the fast path. We cannot get false positives by
6843 // doing this.
6844 // /* HeapReference<Class> */ temp = obj->klass_
6845 GenerateReferenceLoadTwoRegisters(instruction,
6846 temp_loc,
6847 obj_loc,
6848 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006849 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006850
6851 // /* HeapReference<Class> */ temp = temp->iftable_
6852 GenerateReferenceLoadTwoRegisters(instruction,
6853 temp_loc,
6854 temp_loc,
6855 iftable_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006856 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08006857 // Iftable is never null.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006858 __ movl(maybe_temp2_loc.AsRegister<Register>(), Address(temp, array_length_offset));
Mathieu Chartier6beced42016-11-15 15:51:31 -08006859 // Loop through the iftable and check if any class matches.
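        // The IfTable stores (interface class, method array) pairs, so the loop walks the
        // table with a stride of two 32-bit references.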
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006860 NearLabel start_loop;
6861 __ Bind(&start_loop);
Mathieu Chartier6beced42016-11-15 15:51:31 -08006862 // Need to subtract first to handle the empty array case.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006863 __ subl(maybe_temp2_loc.AsRegister<Register>(), Immediate(2));
Mathieu Chartier6beced42016-11-15 15:51:31 -08006864 __ j(kNegative, type_check_slow_path->GetEntryLabel());
6865 // Go to next interface if the classes do not match.
6866 __ cmpl(cls.AsRegister<Register>(),
6867 CodeGeneratorX86::ArrayAddress(temp,
6868 maybe_temp2_loc,
6869 TIMES_4,
6870 object_array_data_offset));
6871 __ j(kNotEqual, &start_loop);
6872 } else {
6873 __ jmp(type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006874 }
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006875 break;
6876 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006877 }
6878 __ Bind(&done);
6879
Roland Levillain0d5a2812015-11-13 10:07:31 +00006880 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006881}
6882
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006883void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction) {
6884 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006885 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006886 InvokeRuntimeCallingConvention calling_convention;
6887 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6888}
6889
6890void InstructionCodeGeneratorX86::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006891 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject
6892 : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006893 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006894 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006895 if (instruction->IsEnter()) {
6896 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6897 } else {
6898 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6899 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006900}
6901
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006902void LocationsBuilderX86::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6903void LocationsBuilderX86::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6904void LocationsBuilderX86::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6905
6906void LocationsBuilderX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
6907 LocationSummary* locations =
6908 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6909 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6910 || instruction->GetResultType() == Primitive::kPrimLong);
6911 locations->SetInAt(0, Location::RequiresRegister());
6912 locations->SetInAt(1, Location::Any());
6913 locations->SetOut(Location::SameAsFirstInput());
6914}
6915
6916void InstructionCodeGeneratorX86::VisitAnd(HAnd* instruction) {
6917 HandleBitwiseOperation(instruction);
6918}
6919
6920void InstructionCodeGeneratorX86::VisitOr(HOr* instruction) {
6921 HandleBitwiseOperation(instruction);
6922}
6923
6924void InstructionCodeGeneratorX86::VisitXor(HXor* instruction) {
6925 HandleBitwiseOperation(instruction);
6926}
6927
6928void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
6929 LocationSummary* locations = instruction->GetLocations();
6930 Location first = locations->InAt(0);
6931 Location second = locations->InAt(1);
6932 DCHECK(first.Equals(locations->Out()));
6933
6934 if (instruction->GetResultType() == Primitive::kPrimInt) {
6935 if (second.IsRegister()) {
6936 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006937 __ andl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006938 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006939 __ orl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006940 } else {
6941 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006942 __ xorl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006943 }
6944 } else if (second.IsConstant()) {
6945 if (instruction->IsAnd()) {
Roland Levillain199f3362014-11-27 17:15:16 +00006946 __ andl(first.AsRegister<Register>(),
6947 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006948 } else if (instruction->IsOr()) {
Roland Levillain199f3362014-11-27 17:15:16 +00006949 __ orl(first.AsRegister<Register>(),
6950 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006951 } else {
6952 DCHECK(instruction->IsXor());
Roland Levillain199f3362014-11-27 17:15:16 +00006953 __ xorl(first.AsRegister<Register>(),
6954 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006955 }
6956 } else {
6957 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006958 __ andl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006959 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006960 __ orl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006961 } else {
6962 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006963 __ xorl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006964 }
6965 }
6966 } else {
6967 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
6968 if (second.IsRegisterPair()) {
6969 if (instruction->IsAnd()) {
6970 __ andl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
6971 __ andl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
6972 } else if (instruction->IsOr()) {
6973 __ orl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
6974 __ orl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
6975 } else {
6976 DCHECK(instruction->IsXor());
6977 __ xorl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
6978 __ xorl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
6979 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006980 } else if (second.IsDoubleStackSlot()) {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006981 if (instruction->IsAnd()) {
6982 __ andl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
6983 __ andl(first.AsRegisterPairHigh<Register>(),
6984 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
6985 } else if (instruction->IsOr()) {
6986 __ orl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
6987 __ orl(first.AsRegisterPairHigh<Register>(),
6988 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
6989 } else {
6990 DCHECK(instruction->IsXor());
6991 __ xorl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
6992 __ xorl(first.AsRegisterPairHigh<Register>(),
6993 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
6994 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006995 } else {
6996 DCHECK(second.IsConstant()) << second;
6997 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04006998 int32_t low_value = Low32Bits(value);
6999 int32_t high_value = High32Bits(value);
7000 Immediate low(low_value);
7001 Immediate high(high_value);
7002 Register first_low = first.AsRegisterPairLow<Register>();
7003 Register first_high = first.AsRegisterPairHigh<Register>();
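      // Operate on the two 32-bit halves separately, skipping halves where the immediate
      // makes the operation a no-op (AND with -1, OR/XOR with 0) and clearing the register
      // with xor when ANDing with 0.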
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007004 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007005 if (low_value == 0) {
7006 __ xorl(first_low, first_low);
7007 } else if (low_value != -1) {
7008 __ andl(first_low, low);
7009 }
7010 if (high_value == 0) {
7011 __ xorl(first_high, first_high);
7012 } else if (high_value != -1) {
7013 __ andl(first_high, high);
7014 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007015 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007016 if (low_value != 0) {
7017 __ orl(first_low, low);
7018 }
7019 if (high_value != 0) {
7020 __ orl(first_high, high);
7021 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007022 } else {
7023 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007024 if (low_value != 0) {
7025 __ xorl(first_low, low);
7026 }
7027 if (high_value != 0) {
7028 __ xorl(first_high, high);
7029 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007030 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007031 }
7032 }
7033}
7034
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007035void InstructionCodeGeneratorX86::GenerateReferenceLoadOneRegister(
7036 HInstruction* instruction,
7037 Location out,
7038 uint32_t offset,
7039 Location maybe_temp,
7040 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007041 Register out_reg = out.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007042 if (read_barrier_option == kWithReadBarrier) {
7043 CHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007044 if (kUseBakerReadBarrier) {
7045 // Load with fast path based Baker's read barrier.
7046 // /* HeapReference<Object> */ out = *(out + offset)
7047 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00007048 instruction, out, out_reg, offset, /* needs_null_check */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007049 } else {
7050 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007051 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain7c1559a2015-12-15 10:55:36 +00007052 // in the following move operation, as we will need it for the
7053 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00007054 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007055 __ movl(maybe_temp.AsRegister<Register>(), out_reg);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007056 // /* HeapReference<Object> */ out = *(out + offset)
7057 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007058 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007059 }
7060 } else {
7061 // Plain load with no read barrier.
7062 // /* HeapReference<Object> */ out = *(out + offset)
7063 __ movl(out_reg, Address(out_reg, offset));
7064 __ MaybeUnpoisonHeapReference(out_reg);
7065 }
7066}
7067
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007068void InstructionCodeGeneratorX86::GenerateReferenceLoadTwoRegisters(
7069 HInstruction* instruction,
7070 Location out,
7071 Location obj,
7072 uint32_t offset,
7073 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007074 Register out_reg = out.AsRegister<Register>();
7075 Register obj_reg = obj.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007076 if (read_barrier_option == kWithReadBarrier) {
7077 CHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007078 if (kUseBakerReadBarrier) {
7079 // Load with fast path based Baker's read barrier.
7080 // /* HeapReference<Object> */ out = *(obj + offset)
7081 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00007082 instruction, out, obj_reg, offset, /* needs_null_check */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007083 } else {
7084 // Load with slow path based read barrier.
7085 // /* HeapReference<Object> */ out = *(obj + offset)
7086 __ movl(out_reg, Address(obj_reg, offset));
7087 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
7088 }
7089 } else {
7090 // Plain load with no read barrier.
7091 // /* HeapReference<Object> */ out = *(obj + offset)
7092 __ movl(out_reg, Address(obj_reg, offset));
7093 __ MaybeUnpoisonHeapReference(out_reg);
7094 }
7095}
7096
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007097void InstructionCodeGeneratorX86::GenerateGcRootFieldLoad(
7098 HInstruction* instruction,
7099 Location root,
7100 const Address& address,
7101 Label* fixup_label,
7102 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007103 Register root_reg = root.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007104 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07007105 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007106 if (kUseBakerReadBarrier) {
7107 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
7108 // Baker's read barrier are used:
7109 //
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007110 // root = *address;
Roland Levillain7c1559a2015-12-15 10:55:36 +00007111 // if (Thread::Current()->GetIsGcMarking()) {
7112 // root = ReadBarrier::Mark(root)
7113 // }
7114
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007115 // /* GcRoot<mirror::Object> */ root = *address
7116 __ movl(root_reg, address);
7117 if (fixup_label != nullptr) {
7118 __ Bind(fixup_label);
7119 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007120 static_assert(
7121 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
7122 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
7123 "have different sizes.");
7124 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
7125 "art::mirror::CompressedReference<mirror::Object> and int32_t "
7126 "have different sizes.");
7127
Vladimir Marko953437b2016-08-24 08:30:46 +00007128 // Slow path marking the GC root `root`.
7129 SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86(
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007130 instruction, root, /* unpoison_ref_before_marking */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007131 codegen_->AddSlowPath(slow_path);
7132
Andreas Gampe542451c2016-07-26 09:02:02 -07007133 __ fs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86PointerSize>().Int32Value()),
Roland Levillain7c1559a2015-12-15 10:55:36 +00007134 Immediate(0));
7135 __ j(kNotEqual, slow_path->GetEntryLabel());
7136 __ Bind(slow_path->GetExitLabel());
7137 } else {
7138 // GC root loaded through a slow path for read barriers other
7139 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007140 // /* GcRoot<mirror::Object>* */ root = address
7141 __ leal(root_reg, address);
7142 if (fixup_label != nullptr) {
7143 __ Bind(fixup_label);
7144 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007145 // /* mirror::Object* */ root = root->Read()
7146 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
7147 }
7148 } else {
7149 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007150 // /* GcRoot<mirror::Object> */ root = *address
7151 __ movl(root_reg, address);
7152 if (fixup_label != nullptr) {
7153 __ Bind(fixup_label);
7154 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007155 // Note that GC roots are not affected by heap poisoning, thus we
7156 // do not have to unpoison `root_reg` here.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007157 }
7158}
7159
7160void CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7161 Location ref,
7162 Register obj,
7163 uint32_t offset,
Roland Levillain7c1559a2015-12-15 10:55:36 +00007164 bool needs_null_check) {
7165 DCHECK(kEmitCompilerReadBarrier);
7166 DCHECK(kUseBakerReadBarrier);
7167
7168 // /* HeapReference<Object> */ ref = *(obj + offset)
7169 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007170 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007171}
7172
7173void CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7174 Location ref,
7175 Register obj,
7176 uint32_t data_offset,
7177 Location index,
Roland Levillain7c1559a2015-12-15 10:55:36 +00007178 bool needs_null_check) {
7179 DCHECK(kEmitCompilerReadBarrier);
7180 DCHECK(kUseBakerReadBarrier);
7181
Roland Levillain3d312422016-06-23 13:53:42 +01007182 static_assert(
7183 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7184 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00007185 // /* HeapReference<Object> */ ref =
7186 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007187 Address src = CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007188 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007189}
7190
7191void CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
7192 Location ref,
7193 Register obj,
7194 const Address& src,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007195 bool needs_null_check,
7196 bool always_update_field,
7197 Register* temp) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007198 DCHECK(kEmitCompilerReadBarrier);
7199 DCHECK(kUseBakerReadBarrier);
7200
7201 // In slow path based read barriers, the read barrier call is
7202 // inserted after the original load. However, in fast path based
7203 // Baker's read barriers, we need to perform the load of
7204 // mirror::Object::monitor_ *before* the original reference load.
7205 // This load-load ordering is required by the read barrier.
7206 // The fast path/slow path (for Baker's algorithm) should look like:
7207 //
7208 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
7209 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
7210 // HeapReference<Object> ref = *src; // Original reference load.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007211 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain7c1559a2015-12-15 10:55:36 +00007212 // if (is_gray) {
7213 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
7214 // }
7215 //
7216 // Note: the original implementation in ReadBarrier::Barrier is
7217 // slightly more complex as:
7218 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007219  //     the high bits of rb_state, which are expected to be all zeroes
7220 // (we use CodeGeneratorX86::GenerateMemoryBarrier instead here,
7221 // which is a no-op thanks to the x86 memory model);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007222 // - it performs additional checks that we do not do here for
7223 // performance reasons.
7224
7225 Register ref_reg = ref.AsRegister<Register>();
Roland Levillain7c1559a2015-12-15 10:55:36 +00007226 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
7227
Vladimir Marko953437b2016-08-24 08:30:46 +00007228 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007229 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
7230 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko953437b2016-08-24 08:30:46 +00007231 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
7232 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
7233 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
7234
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007235 // if (rb_state == ReadBarrier::GrayState())
Vladimir Marko953437b2016-08-24 08:30:46 +00007236 // ref = ReadBarrier::Mark(ref);
7237 // At this point, just do the "if" and make sure that flags are preserved until the branch.
7238 __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain7c1559a2015-12-15 10:55:36 +00007239 if (needs_null_check) {
7240 MaybeRecordImplicitNullCheck(instruction);
7241 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007242
7243 // Load fence to prevent load-load reordering.
7244 // Note that this is a no-op, thanks to the x86 memory model.
7245 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
7246
7247 // The actual reference load.
7248 // /* HeapReference<Object> */ ref = *src
Vladimir Marko953437b2016-08-24 08:30:46 +00007249 __ movl(ref_reg, src); // Flags are unaffected.
7250
7251  // Note: Reference unpoisoning modifies the flags, so we need to delay it until after the branch.
7252 // Slow path marking the object `ref` when it is gray.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007253 SlowPathCode* slow_path;
7254 if (always_update_field) {
7255 DCHECK(temp != nullptr);
7256 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathX86(
7257 instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp);
7258 } else {
7259 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86(
7260 instruction, ref, /* unpoison_ref_before_marking */ true);
7261 }
Vladimir Marko953437b2016-08-24 08:30:46 +00007262 AddSlowPath(slow_path);
7263
7264 // We have done the "if" of the gray bit check above, now branch based on the flags.
7265 __ j(kNotZero, slow_path->GetEntryLabel());
Roland Levillain7c1559a2015-12-15 10:55:36 +00007266
7267 // Object* ref = ref_addr->AsMirrorPtr()
7268 __ MaybeUnpoisonHeapReference(ref_reg);
7269
Roland Levillain7c1559a2015-12-15 10:55:36 +00007270 __ Bind(slow_path->GetExitLabel());
7271}
7272
7273void CodeGeneratorX86::GenerateReadBarrierSlow(HInstruction* instruction,
7274 Location out,
7275 Location ref,
7276 Location obj,
7277 uint32_t offset,
7278 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007279 DCHECK(kEmitCompilerReadBarrier);
7280
Roland Levillain7c1559a2015-12-15 10:55:36 +00007281 // Insert a slow path based read barrier *after* the reference load.
7282 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007283 // If heap poisoning is enabled, the unpoisoning of the loaded
7284 // reference will be carried out by the runtime within the slow
7285 // path.
7286 //
7287 // Note that `ref` currently does not get unpoisoned (when heap
7288 // poisoning is enabled), which is alright as the `ref` argument is
7289 // not used by the artReadBarrierSlow entry point.
7290 //
7291 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
7292 SlowPathCode* slow_path = new (GetGraph()->GetArena())
7293 ReadBarrierForHeapReferenceSlowPathX86(instruction, out, ref, obj, offset, index);
7294 AddSlowPath(slow_path);
7295
Roland Levillain0d5a2812015-11-13 10:07:31 +00007296 __ jmp(slow_path->GetEntryLabel());
7297 __ Bind(slow_path->GetExitLabel());
7298}
7299
Roland Levillain7c1559a2015-12-15 10:55:36 +00007300void CodeGeneratorX86::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7301 Location out,
7302 Location ref,
7303 Location obj,
7304 uint32_t offset,
7305 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007306 if (kEmitCompilerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007307 // Baker's read barriers shall be handled by the fast path
7308 // (CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier).
7309 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007310 // If heap poisoning is enabled, unpoisoning will be taken care of
7311 // by the runtime within the slow path.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007312 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007313 } else if (kPoisonHeapReferences) {
7314 __ UnpoisonHeapReference(out.AsRegister<Register>());
7315 }
7316}
7317
Roland Levillain7c1559a2015-12-15 10:55:36 +00007318void CodeGeneratorX86::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7319 Location out,
7320 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007321 DCHECK(kEmitCompilerReadBarrier);
7322
Roland Levillain7c1559a2015-12-15 10:55:36 +00007323 // Insert a slow path based read barrier *after* the GC root load.
7324 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007325 // Note that GC roots are not affected by heap poisoning, so we do
7326 // not need to do anything special for this here.
7327 SlowPathCode* slow_path =
7328 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86(instruction, out, root);
7329 AddSlowPath(slow_path);
7330
Roland Levillain0d5a2812015-11-13 10:07:31 +00007331 __ jmp(slow_path->GetEntryLabel());
7332 __ Bind(slow_path->GetExitLabel());
7333}
7334
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007335void LocationsBuilderX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007336 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007337 LOG(FATAL) << "Unreachable";
7338}
7339
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007340void InstructionCodeGeneratorX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007341 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007342 LOG(FATAL) << "Unreachable";
7343}
7344
Mark Mendellfe57faa2015-09-18 09:26:15 -04007345// Simple implementation of packed switch - generate cascaded compare/jumps.
7346void LocationsBuilderX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7347 LocationSummary* locations =
7348 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
7349 locations->SetInAt(0, Location::RequiresRegister());
7350}
7351
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007352void InstructionCodeGeneratorX86::GenPackedSwitchWithCompares(Register value_reg,
7353 int32_t lower_bound,
7354 uint32_t num_entries,
7355 HBasicBlock* switch_block,
7356 HBasicBlock* default_block) {
7357 // Figure out the correct compare values and jump conditions.
7358 // Handle the first compare/branch as a special case because it might
7359 // jump to the default case.
7360 DCHECK_GT(num_entries, 2u);
7361 Condition first_condition;
7362 uint32_t index;
7363 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
7364 if (lower_bound != 0) {
7365 first_condition = kLess;
7366 __ cmpl(value_reg, Immediate(lower_bound));
7367 __ j(first_condition, codegen_->GetLabelOf(default_block));
7368 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007369
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007370 index = 1;
7371 } else {
7372 // Handle all the compare/jumps below.
7373 first_condition = kBelow;
7374 index = 0;
7375 }
7376
7377 // Handle the rest of the compare/jumps.
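  // Each comparison below dispatches two consecutive cases: if the value is
  // below case_value[index + 1], it can only be case_value[index], because
  // all smaller values have already been handled.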
7378 for (; index + 1 < num_entries; index += 2) {
7379 int32_t compare_to_value = lower_bound + index + 1;
7380 __ cmpl(value_reg, Immediate(compare_to_value));
7381 // Jump to successors[index] if value < case_value[index].
7382 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
7383 // Jump to successors[index + 1] if value == case_value[index + 1].
7384 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
7385 }
7386
7387 if (index != num_entries) {
7388    // There is an odd number of entries. Handle the last one.
7389 DCHECK_EQ(index + 1, num_entries);
7390 __ cmpl(value_reg, Immediate(lower_bound + index));
7391 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007392 }
7393
7394 // And the default for any other value.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007395 if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
7396 __ jmp(codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007397 }
7398}
7399
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007400void InstructionCodeGeneratorX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7401 int32_t lower_bound = switch_instr->GetStartValue();
7402 uint32_t num_entries = switch_instr->GetNumEntries();
7403 LocationSummary* locations = switch_instr->GetLocations();
7404 Register value_reg = locations->InAt(0).AsRegister<Register>();
7405
7406 GenPackedSwitchWithCompares(value_reg,
7407 lower_bound,
7408 num_entries,
7409 switch_instr->GetBlock(),
7410 switch_instr->GetDefaultBlock());
7411}
7412
Mark Mendell805b3b52015-09-18 14:10:29 -04007413void LocationsBuilderX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
7414 LocationSummary* locations =
7415 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
7416 locations->SetInAt(0, Location::RequiresRegister());
7417
7418 // Constant area pointer.
7419 locations->SetInAt(1, Location::RequiresRegister());
7420
7421 // And the temporary we need.
7422 locations->AddTemp(Location::RequiresRegister());
7423}
7424
7425void InstructionCodeGeneratorX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
7426 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007427 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendell805b3b52015-09-18 14:10:29 -04007428 LocationSummary* locations = switch_instr->GetLocations();
7429 Register value_reg = locations->InAt(0).AsRegister<Register>();
7430 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7431
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007432 if (num_entries <= kPackedSwitchJumpTableThreshold) {
7433 GenPackedSwitchWithCompares(value_reg,
7434 lower_bound,
7435 num_entries,
7436 switch_instr->GetBlock(),
7437 default_block);
7438 return;
7439 }
7440
Mark Mendell805b3b52015-09-18 14:10:29 -04007441  // For larger switches, use a jump table placed in the constant area.
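  // The emitted sequence looks roughly like this (assuming a non-zero
  // lower bound):
  //   leal temp, [value - lower_bound]
  //   cmpl temp, num_entries - 1
  //   ja   default
  //   movl temp, [constant_area + table_offset + temp * 4]
  //   addl temp, constant_area
  //   jmp  temp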
7442 Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
7443 Register constant_area = locations->InAt(1).AsRegister<Register>();
7444
7445 // Remove the bias, if needed.
7446 if (lower_bound != 0) {
7447 __ leal(temp_reg, Address(value_reg, -lower_bound));
7448 value_reg = temp_reg;
7449 }
7450
7451 // Is the value in range?
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007452 DCHECK_GE(num_entries, 1u);
Mark Mendell805b3b52015-09-18 14:10:29 -04007453 __ cmpl(value_reg, Immediate(num_entries - 1));
7454 __ j(kAbove, codegen_->GetLabelOf(default_block));
7455
7456 // We are in the range of the table.
7457 // Load (target-constant_area) from the jump table, indexing by the value.
7458 __ movl(temp_reg, codegen_->LiteralCaseTable(switch_instr, constant_area, value_reg));
7459
7460 // Compute the actual target address by adding in constant_area.
7461 __ addl(temp_reg, constant_area);
7462
7463 // And jump.
7464 __ jmp(temp_reg);
7465}
7466
Mark Mendell0616ae02015-04-17 12:49:27 -04007467void LocationsBuilderX86::VisitX86ComputeBaseMethodAddress(
7468 HX86ComputeBaseMethodAddress* insn) {
7469 LocationSummary* locations =
7470 new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
7471 locations->SetOut(Location::RequiresRegister());
7472}
7473
7474void InstructionCodeGeneratorX86::VisitX86ComputeBaseMethodAddress(
7475 HX86ComputeBaseMethodAddress* insn) {
7476 LocationSummary* locations = insn->GetLocations();
7477 Register reg = locations->Out().AsRegister<Register>();
7478
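  // 32-bit x86 has no direct way to read EIP, so the method address is
  // materialized with a call to the next instruction followed by a pop of
  // the pushed return address.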
7479 // Generate call to next instruction.
7480 Label next_instruction;
7481 __ call(&next_instruction);
7482 __ Bind(&next_instruction);
7483
7484  // Remember this offset for later use with the constant area.
7485 codegen_->SetMethodAddressOffset(GetAssembler()->CodeSize());
7486
7487 // Grab the return address off the stack.
7488 __ popl(reg);
7489}
7490
7491void LocationsBuilderX86::VisitX86LoadFromConstantTable(
7492 HX86LoadFromConstantTable* insn) {
7493 LocationSummary* locations =
7494 new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
7495
7496 locations->SetInAt(0, Location::RequiresRegister());
7497 locations->SetInAt(1, Location::ConstantLocation(insn->GetConstant()));
7498
7499  // If the value is emitted at its use site rather than materialized, only the inputs need to be set.
David Brazdilb3e773e2016-01-26 11:28:37 +00007500 if (insn->IsEmittedAtUseSite()) {
Mark Mendell0616ae02015-04-17 12:49:27 -04007501 return;
7502 }
7503
7504 switch (insn->GetType()) {
7505 case Primitive::kPrimFloat:
7506 case Primitive::kPrimDouble:
7507 locations->SetOut(Location::RequiresFpuRegister());
7508 break;
7509
7510 case Primitive::kPrimInt:
7511 locations->SetOut(Location::RequiresRegister());
7512 break;
7513
7514 default:
7515 LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
7516 }
7517}
7518
7519void InstructionCodeGeneratorX86::VisitX86LoadFromConstantTable(HX86LoadFromConstantTable* insn) {
David Brazdilb3e773e2016-01-26 11:28:37 +00007520 if (insn->IsEmittedAtUseSite()) {
Mark Mendell0616ae02015-04-17 12:49:27 -04007521 return;
7522 }
7523
7524 LocationSummary* locations = insn->GetLocations();
7525 Location out = locations->Out();
7526 Register const_area = locations->InAt(0).AsRegister<Register>();
7527  HConstant* value = insn->GetConstant();
7528
7529 switch (insn->GetType()) {
7530 case Primitive::kPrimFloat:
7531 __ movss(out.AsFpuRegister<XmmRegister>(),
7532 codegen_->LiteralFloatAddress(value->AsFloatConstant()->GetValue(), const_area));
7533 break;
7534
7535 case Primitive::kPrimDouble:
7536 __ movsd(out.AsFpuRegister<XmmRegister>(),
7537 codegen_->LiteralDoubleAddress(value->AsDoubleConstant()->GetValue(), const_area));
7538 break;
7539
7540 case Primitive::kPrimInt:
7541 __ movl(out.AsRegister<Register>(),
7542 codegen_->LiteralInt32Address(value->AsIntConstant()->GetValue(), const_area));
7543 break;
7544
7545 default:
7546 LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
7547 }
7548}
7549
Mark Mendell0616ae02015-04-17 12:49:27 -04007550/**
7551 * Class to handle late fixup of offsets into the constant area.
7552 */
Vladimir Marko5233f932015-09-29 19:01:15 +01007553class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
Mark Mendell0616ae02015-04-17 12:49:27 -04007554 public:
Mark Mendell805b3b52015-09-18 14:10:29 -04007555 RIPFixup(CodeGeneratorX86& codegen, size_t offset)
7556 : codegen_(&codegen), offset_into_constant_area_(offset) {}
7557
7558 protected:
7559 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
7560
7561 CodeGeneratorX86* codegen_;
Mark Mendell0616ae02015-04-17 12:49:27 -04007562
7563 private:
7564 void Process(const MemoryRegion& region, int pos) OVERRIDE {
7565 // Patch the correct offset for the instruction. The place to patch is the
7566 // last 4 bytes of the instruction.
7567    // The value to patch is the offset of the constant area entry relative to
7568    // the address computed by the HX86ComputeBaseMethodAddress instruction.
Mark Mendell805b3b52015-09-18 14:10:29 -04007569 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
Mathieu Chartier6beced42016-11-15 15:51:31 -08007570 int32_t relative_position = constant_offset - codegen_->GetMethodAddressOffset();
Mark Mendell0616ae02015-04-17 12:49:27 -04007571
7572 // Patch in the right value.
7573 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
7574 }
7575
Mark Mendell0616ae02015-04-17 12:49:27 -04007576 // Location in constant area that the fixup refers to.
Mark Mendell805b3b52015-09-18 14:10:29 -04007577 int32_t offset_into_constant_area_;
Mark Mendell0616ae02015-04-17 12:49:27 -04007578};
7579
Mark Mendell805b3b52015-09-18 14:10:29 -04007580/**
7581 * Class to handle late fixup of offsets to a jump table that will be created in the
7582 * constant area.
7583 */
7584class JumpTableRIPFixup : public RIPFixup {
7585 public:
7586 JumpTableRIPFixup(CodeGeneratorX86& codegen, HX86PackedSwitch* switch_instr)
7587 : RIPFixup(codegen, static_cast<size_t>(-1)), switch_instr_(switch_instr) {}
7588
7589 void CreateJumpTable() {
7590 X86Assembler* assembler = codegen_->GetAssembler();
7591
7592 // Ensure that the reference to the jump table has the correct offset.
7593 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
7594 SetOffset(offset_in_constant_table);
7595
7596 // The label values in the jump table are computed relative to the
7597 // instruction addressing the constant area.
7598 const int32_t relative_offset = codegen_->GetMethodAddressOffset();
7599
7600    // Populate the jump table with the offsets of the case target blocks.
7601 int32_t num_entries = switch_instr_->GetNumEntries();
7602 HBasicBlock* block = switch_instr_->GetBlock();
7603 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
7604 // The value that we want is the target offset - the position of the table.
7605 for (int32_t i = 0; i < num_entries; i++) {
7606 HBasicBlock* b = successors[i];
7607 Label* l = codegen_->GetLabelOf(b);
7608 DCHECK(l->IsBound());
7609 int32_t offset_to_block = l->Position() - relative_offset;
7610 assembler->AppendInt32(offset_to_block);
7611 }
7612 }
7613
7614 private:
7615 const HX86PackedSwitch* switch_instr_;
7616};
7617
7618void CodeGeneratorX86::Finalize(CodeAllocator* allocator) {
7619 // Generate the constant area if needed.
7620 X86Assembler* assembler = GetAssembler();
7621 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7622 // Align to 4 byte boundary to reduce cache misses, as the data is 4 and 8
7623 // byte values.
7624 assembler->Align(4, 0);
7625 constant_area_start_ = assembler->CodeSize();
7626
7627 // Populate any jump tables.
7628 for (auto jump_table : fixups_to_jump_tables_) {
7629 jump_table->CreateJumpTable();
7630 }
7631
7632 // And now add the constant area to the generated code.
7633 assembler->AddConstantArea();
7634 }
7635
7636 // And finish up.
7637 CodeGenerator::Finalize(allocator);
7638}
7639
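// The literal helpers below add a value to the constant area and return an
// Address whose displacement is patched later by a RIPFixup; `reg` is
// expected to hold the base produced by HX86ComputeBaseMethodAddress.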
Mark Mendell0616ae02015-04-17 12:49:27 -04007640Address CodeGeneratorX86::LiteralDoubleAddress(double v, Register reg) {
7641 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddDouble(v));
7642 return Address(reg, kDummy32BitOffset, fixup);
7643}
7644
7645Address CodeGeneratorX86::LiteralFloatAddress(float v, Register reg) {
7646 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddFloat(v));
7647 return Address(reg, kDummy32BitOffset, fixup);
7648}
7649
7650Address CodeGeneratorX86::LiteralInt32Address(int32_t v, Register reg) {
7651 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt32(v));
7652 return Address(reg, kDummy32BitOffset, fixup);
7653}
7654
7655Address CodeGeneratorX86::LiteralInt64Address(int64_t v, Register reg) {
7656 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, __ AddInt64(v));
7657 return Address(reg, kDummy32BitOffset, fixup);
7658}
7659
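// Materialize a 32-bit constant; zero uses xorl, which has a shorter encoding
// than movl with a zero immediate.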
Aart Bika19616e2016-02-01 18:57:58 -08007660void CodeGeneratorX86::Load32BitValue(Register dest, int32_t value) {
7661 if (value == 0) {
7662 __ xorl(dest, dest);
7663 } else {
7664 __ movl(dest, Immediate(value));
7665 }
7666}
7667
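// Compare a register against a 32-bit constant; a comparison against zero
// uses testl, which avoids encoding an immediate.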
7668void CodeGeneratorX86::Compare32BitValue(Register dest, int32_t value) {
7669 if (value == 0) {
7670 __ testl(dest, dest);
7671 } else {
7672 __ cmpl(dest, Immediate(value));
7673 }
7674}
7675
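// Emit a 32-bit integer compare; the right-hand side may be a constant, a
// stack slot or a register.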
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007676void CodeGeneratorX86::GenerateIntCompare(Location lhs, Location rhs) {
7677 Register lhs_reg = lhs.AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07007678 GenerateIntCompare(lhs_reg, rhs);
7679}
7680
7681void CodeGeneratorX86::GenerateIntCompare(Register lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007682 if (rhs.IsConstant()) {
7683 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07007684 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007685 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07007686 __ cmpl(lhs, Address(ESP, rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007687 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07007688 __ cmpl(lhs, rhs.AsRegister<Register>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007689 }
7690}
7691
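// Build the Address of an array element: a constant index is folded into the
// displacement, otherwise a base + index * scale + displacement form is used.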
7692Address CodeGeneratorX86::ArrayAddress(Register obj,
7693 Location index,
7694 ScaleFactor scale,
7695 uint32_t data_offset) {
7696 return index.IsConstant() ?
7697 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
7698 Address(obj, index.AsRegister<Register>(), scale, data_offset);
7699}
7700
Mark Mendell805b3b52015-09-18 14:10:29 -04007701Address CodeGeneratorX86::LiteralCaseTable(HX86PackedSwitch* switch_instr,
7702 Register reg,
7703 Register value) {
7704 // Create a fixup to be used to create and address the jump table.
7705 JumpTableRIPFixup* table_fixup =
7706 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
7707
7708  // Remember the fixup so the jump table gets populated in Finalize().
7709 fixups_to_jump_tables_.push_back(table_fixup);
7710
7711 // We want a scaled address, as we are extracting the correct offset from the table.
7712 return Address(reg, value, TIMES_4, kDummy32BitOffset, table_fixup);
7713}
7714
Andreas Gampe85b62f22015-09-09 13:15:38 -07007715// TODO: target as memory.
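// Move a value of the given type from the call return location to `target`.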
7716void CodeGeneratorX86::MoveFromReturnRegister(Location target, Primitive::Type type) {
7717 if (!target.IsValid()) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007718 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007719 return;
7720 }
7721
7722 DCHECK_NE(type, Primitive::kPrimVoid);
7723
7724 Location return_loc = InvokeDexCallingConventionVisitorX86().GetReturnLocation(type);
7725 if (target.Equals(return_loc)) {
7726 return;
7727 }
7728
7729 // TODO: Consider pairs in the parallel move resolver, then this could be nicely merged
7730 // with the else branch.
7731 if (type == Primitive::kPrimLong) {
7732 HParallelMove parallel_move(GetGraph()->GetArena());
7733 parallel_move.AddMove(return_loc.ToLow(), target.ToLow(), Primitive::kPrimInt, nullptr);
7734 parallel_move.AddMove(return_loc.ToHigh(), target.ToHigh(), Primitive::kPrimInt, nullptr);
7735 GetMoveResolver()->EmitNativeCode(&parallel_move);
7736 } else {
7737 // Let the parallel move resolver take care of all of this.
7738 HParallelMove parallel_move(GetGraph()->GetArena());
7739 parallel_move.AddMove(return_loc, target, type, nullptr);
7740 GetMoveResolver()->EmitNativeCode(&parallel_move);
7741 }
7742}
7743
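// Patch the 32-bit immediate of a JIT GC root load so that it holds the
// address of the root's slot in the JIT root table.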
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007744void CodeGeneratorX86::PatchJitRootUse(uint8_t* code,
7745 const uint8_t* roots_data,
7746 const PatchInfo<Label>& info,
7747 uint64_t index_in_table) const {
7748 uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
7749 uintptr_t address =
7750 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
7751 typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
7752 reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
7753 dchecked_integral_cast<uint32_t>(address);
7754}
7755
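// For every recorded JIT string and class patch, look up the root's index and
// patch the load with the address of its slot in `roots_data`.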
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007756void CodeGeneratorX86::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7757 for (const PatchInfo<Label>& info : jit_string_patches_) {
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007758 const auto& it = jit_string_roots_.find(
7759 StringReference(&info.dex_file, dex::StringIndex(info.index)));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007760 DCHECK(it != jit_string_roots_.end());
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007761 PatchJitRootUse(code, roots_data, info, it->second);
7762 }
7763
7764 for (const PatchInfo<Label>& info : jit_class_patches_) {
7765 const auto& it = jit_class_roots_.find(
7766 TypeReference(&info.dex_file, dex::TypeIndex(info.index)));
7767 DCHECK(it != jit_class_roots_.end());
7768 PatchJitRootUse(code, roots_data, info, it->second);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007769 }
7770}
7771
Roland Levillain4d027112015-07-01 15:41:14 +01007772#undef __
7773
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00007774} // namespace x86
7775} // namespace art