/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_x86.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/managed_register_x86.h"

namespace art {

template<class MirrorType>
class GcRoot;

namespace x86 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = EAX;
static constexpr Register kCoreCalleeSaves[] = { EBP, ESI, EDI };

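// The C2 flag (bit 10) of the x87 FPU status word; fprem sets it while the partial
// remainder computation is still incomplete, so code testing this mask loops until it clears.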
static constexpr int kC2ConditionMask = 0x400;

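// Pseudo-register index one past the eight physical GPRs; only used to reserve a slot for
// the return address in the register mask (see the "fake return address register" comment
// in the CodeGeneratorX86 constructor below).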
static constexpr int kFakeReturnRegister = Register(8);

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, x).Int32Value()

class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(HNullCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    x86_codegen->InvokeRuntime(kQuickThrowNullPointer,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};

class DivZeroCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit DivZeroCheckSlowPathX86(HDivZeroCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    x86_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86);
};

class DivRemMinusOneSlowPathX86 : public SlowPathCode {
 public:
  DivRemMinusOneSlowPathX86(HInstruction* instruction, Register reg, bool is_div)
      : SlowPathCode(instruction), reg_(reg), is_div_(is_div) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (is_div_) {
      __ negl(reg_);
    } else {
      __ movl(reg_, Immediate(0));
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "DivRemMinusOneSlowPathX86"; }

 private:
  Register reg_;
  bool is_div_;
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86);
};

class BoundsCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86(HBoundsCheck* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }

    // Are we using an array length from memory?
    HInstruction* array_length = instruction_->InputAt(1);
    Location length_loc = locations->InAt(1);
    InvokeRuntimeCallingConvention calling_convention;
    if (array_length->IsArrayLength() && array_length->IsEmittedAtUseSite()) {
      // Load the array length into our temporary.
      uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
      Location array_loc = array_length->GetLocations()->InAt(0);
      Address array_len(array_loc.AsRegister<Register>(), len_offset);
      length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(1));
      // Check for conflicts with index.
      if (length_loc.Equals(locations->InAt(0))) {
        // We know we aren't using parameter 2.
        length_loc = Location::RegisterLocation(calling_convention.GetRegisterAt(2));
      }
      __ movl(length_loc.AsRegister<Register>(), array_len);
      if (mirror::kUseStringCompression) {
        __ shrl(length_loc.AsRegister<Register>(), Immediate(1));
      }
    }
    x86_codegen->EmitParallelMoves(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimInt,
        length_loc,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    x86_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
};

class SuspendCheckSlowPathX86 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCode(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves full width XMM for SIMD.
    x86_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores full width XMM for SIMD.
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathX86"; }

 private:
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
};

class LoadStringSlowPathX86 : public SlowPathCode {
 public:
  explicit LoadStringSlowPathX86(HLoadString* instruction): SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ movl(calling_convention.GetRegisterAt(0), Immediate(string_index.index_));
    x86_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    Register method_address = locations->InAt(0).AsRegister<Register>();
    __ movl(Address(method_address, CodeGeneratorX86::kDummy32BitOffset),
            locations->Out().AsRegister<Register>());
    Label* fixup_label = x86_codegen->NewStringBssEntryPatch(instruction_->AsLoadString());
    __ Bind(fixup_label);

    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
};

class LoadClassSlowPathX86 : public SlowPathCode {
 public:
  LoadClassSlowPathX86(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : SlowPathCode(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ movl(calling_convention.GetRegisterAt(0), Immediate(type_index.index_));
    x86_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage
                                          : kQuickInitializeType,
                               instruction_,
                               dex_pc_,
                               this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_codegen->Move32(out, Location::RegisterLocation(EAX));
    }
    RestoreLiveRegisters(codegen, locations);
    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
      DCHECK(out.IsValid());
      Register method_address = locations->InAt(0).AsRegister<Register>();
      __ movl(Address(method_address, CodeGeneratorX86::kDummy32BitOffset),
              locations->Out().AsRegister<Register>());
      Label* fixup_label = x86_codegen->NewTypeBssEntryPatch(cls_);
      __ Bind(fixup_label);
    }
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathX86"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86);
};

class TypeCheckSlowPathX86 : public SlowPathCode {
 public:
  TypeCheckSlowPathX86(HInstruction* instruction, bool is_fatal)
      : SlowPathCode(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());

    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->EmitParallelMoves(locations->InAt(0),
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   Primitive::kPrimNot,
                                   locations->InAt(1),
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                                   Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      x86_codegen->InvokeRuntime(kQuickInstanceofNonTrivial,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
    } else {
      DCHECK(instruction_->IsCheckCast());
      x86_codegen->InvokeRuntime(kQuickCheckInstanceOf,
                                 instruction_,
                                 instruction_->GetDexPc(),
                                 this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      if (instruction_->IsInstanceOf()) {
        x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
      }
      RestoreLiveRegisters(codegen, locations);

      __ jmp(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86"; }
  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
};

class DeoptimizationSlowPathX86 : public SlowPathCode {
 public:
  explicit DeoptimizationSlowPathX86(HDeoptimize* instruction)
      : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    x86_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathX86);
};

class ArraySetSlowPathX86 : public SlowPathCode {
 public:
  explicit ArraySetSlowPathX86(HInstruction* instruction) : SlowPathCode(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathX86);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathX86 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierMarkSlowPathX86(HInstruction* instruction,
                             Location ref,
                             bool unpoison_ref_before_marking)
      : SlowPathCode(instruction),
        ref_(ref),
        unpoison_ref_before_marking_(unpoison_ref_before_marking) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathX86"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_reg);
    }
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    DCHECK_NE(ref_reg, ESP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in EAX):
    //
    //   EAX <- ref
    //   EAX <- ReadBarrierMark(EAX)
    //   ref <- EAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathX86);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathX86 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathX86(HInstruction* instruction,
                                           Location ref,
                                           Register obj,
                                           const Address& field_addr,
                                           bool unpoison_ref_before_marking,
                                           Register temp)
      : SlowPathCode(instruction),
        ref_(ref),
        obj_(obj),
        field_addr_(field_addr),
        unpoison_ref_before_marking_(unpoison_ref_before_marking),
        temp_(temp) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkAndUpdateFieldSlowPathX86"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);

    __ Bind(GetEntryLabel());
    if (unpoison_ref_before_marking_) {
      // Object* ref = ref_addr->AsMirrorPtr()
      __ MaybeUnpoisonHeapReference(ref_reg);
    }

    // Save the old (unpoisoned) reference.
    __ movl(temp_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    DCHECK_NE(ref_reg, ESP);
    DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in EAX):
    //
    //   EAX <- ref
    //   EAX <- ReadBarrierMark(EAX)
    //   ref <- EAX
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*field_addr`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // LOCK CMPXCHGL instruction in the compare-and-set (CAS)
    // operation below would abort the CAS, leaving the field as-is.
    NearLabel done;
    __ cmpl(temp_, ref_reg);
    __ j(kEqual, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates it before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.
    // This operation is implemented with a 32-bit LOCK CMPXCHG
    // instruction, which requires the expected value (the old
    // reference) to be in EAX. Save EAX beforehand, and move the
    // expected value (stored in `temp_`) into EAX.
    __ pushl(EAX);
    __ movl(EAX, temp_);

    // Convenience aliases.
    Register base = obj_;
    Register expected = EAX;
    Register value = ref_reg;

    bool base_equals_value = (base == value);
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value` to a temporary register. This way, poisoning
        // `value` won't invalidate `base`.
        value = temp_;
        __ movl(value, base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (EAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value, expected);
      DCHECK_NE(base, expected);

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(value);
    }

    __ LockCmpxchgl(field_addr_, value);

    // If heap poisoning is enabled, we need to unpoison the values
    // that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value` has been moved to a temporary register, no need
        // to unpoison it.
      } else {
        __ UnpoisonHeapReference(value);
      }
      // No need to unpoison `expected` (EAX), as it will be overwritten below.
    }

    // Restore EAX.
    __ popl(EAX);

    __ Bind(&done);
    __ jmp(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The address of the marked reference field. The base of this address must be `obj_`.
  const Address field_addr_;

  // Should the reference in `ref_` be unpoisoned prior to marking it?
  const bool unpoison_ref_before_marking_;

  const Register temp_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathX86);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForHeapReferenceSlowPathX86(HInstruction* instruction,
                                         Location out,
                                         Location ref,
                                         Location obj,
                                         uint32_t offset,
                                         Location index)
      : SlowPathCode(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ movl(out, Address(out, offset));
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::x86::X86Assembler::shll and
          // art::x86::X86Assembler::AddImmediate below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ movl(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ shll(index_reg, Immediate(TIMES_4));
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ AddImmediate(index_reg, Immediate(offset_));
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ movl(calling_convention.GetRegisterAt(2), Immediate(offset_));
    }
    x86_codegen->InvokeRuntime(kQuickReadBarrierSlow, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathX86"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref && i != obj && !codegen->IsCoreCalleeSaveRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on x86
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathX86);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathX86 : public SlowPathCode {
 public:
  ReadBarrierForRootSlowPathX86(HInstruction* instruction, Location out, Location root)
      : SlowPathCode(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    x86_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                               instruction_,
                               instruction_->GetDexPc(),
                               this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    x86_codegen->Move32(out_, Location::RegisterLocation(EAX));

    RestoreLiveRegisters(codegen, locations);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathX86"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathX86);
};

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(GetAssembler())->  // NOLINT

inline Condition X86Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

// Maps signed condition to unsigned condition and FP condition to x86 name.
inline Condition X86UnsignedOrFPCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    // Signed to unsigned, and FP to x86 name.
    case kCondLT: return kBelow;
    case kCondLE: return kBelowEqual;
    case kCondGT: return kAbove;
    case kCondGE: return kAboveEqual;
    // Unsigned remain unchanged.
    case kCondB: return kBelow;
    case kCondBE: return kBelowEqual;
    case kCondA: return kAbove;
    case kCondAE: return kAboveEqual;
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}

void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << XmmRegister(reg);
}

size_t CodeGeneratorX86::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movl(Address(ESP, stack_index), static_cast<Register>(reg_id));
  return kX86WordSize;
}

size_t CodeGeneratorX86::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movl(static_cast<Register>(reg_id), Address(ESP, stack_index));
  return kX86WordSize;
}

size_t CodeGeneratorX86::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
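  // With SIMD, full 128-bit vectors may be live in XMM registers, so spill the whole
  // register; otherwise only the 64-bit (double-precision) lane needs to be preserved.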
  if (GetGraph()->HasSIMD()) {
    __ movups(Address(ESP, stack_index), XmmRegister(reg_id));
  } else {
    __ movsd(Address(ESP, stack_index), XmmRegister(reg_id));
  }
  return GetFloatingPointSpillSlotSize();
}

size_t CodeGeneratorX86::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  if (GetGraph()->HasSIMD()) {
    __ movups(XmmRegister(reg_id), Address(ESP, stack_index));
  } else {
    __ movsd(XmmRegister(reg_id), Address(ESP, stack_index));
  }
  return GetFloatingPointSpillSlotSize();
}

void CodeGeneratorX86::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                     HInstruction* instruction,
                                     uint32_t dex_pc,
                                     SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kX86PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}

void CodeGeneratorX86::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                           HInstruction* instruction,
                                                           SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}

void CodeGeneratorX86::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ fs()->call(Address::Absolute(entry_point_offset));
}

CodeGeneratorX86::CodeGeneratorX86(HGraph* graph,
                                   const X86InstructionSetFeatures& isa_features,
                                   const CompilerOptions& compiler_options,
                                   OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCpuRegisters,
                    kNumberOfXmmRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves))
                        | (1 << kFakeReturnRegister),
                    0,
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      constant_area_start_(-1),
      fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_address_offset_(std::less<uint32_t>(),
                             graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Use a fake return address register to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}

void CodeGeneratorX86::SetupBlockedRegisters() const {
  // Stack register is always reserved.
  blocked_core_registers_[ESP] = true;
}

InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86Core(static_cast<int>(reg));
}

Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001056void CodeGeneratorX86::GenerateFrameEntry() {
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001057 __ cfi().SetCurrentCFAOffset(kX86WordSize); // return address
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00001058 __ Bind(&frame_entry_label_);
Roland Levillain199f3362014-11-27 17:15:16 +00001059 bool skip_overflow_check =
1060 IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001061 DCHECK(GetCompilerOptions().GetImplicitStackOverflowChecks());
Calin Juravle93edf732015-01-20 20:14:07 +00001062
Nicolas Geoffrayd97dc402015-01-22 13:50:01 +00001063 if (!skip_overflow_check) {
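    // Explanatory note: this is the implicit stack overflow check. The testl probes one
    // word at the far end of the reserved stack region; if the stack has overflowed into
    // the protected area, the access faults and the runtime's fault handler is expected to
    // turn that into a StackOverflowError. RecordPcInfo ties the probing PC to this method.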
Nicolas Geoffray397f2e42014-07-23 12:57:19 +01001064 __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
Nicolas Geoffray39468442014-09-02 15:17:15 +01001065 RecordPcInfo(nullptr, 0);
Nicolas Geoffray397f2e42014-07-23 12:57:19 +01001066 }
1067
Mark Mendell5f874182015-03-04 15:42:45 -05001068 if (HasEmptyFrame()) {
1069 return;
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001070 }
Mark Mendell5f874182015-03-04 15:42:45 -05001071
1072 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
1073 Register reg = kCoreCalleeSaves[i];
1074 if (allocated_registers_.ContainsCoreRegister(reg)) {
1075 __ pushl(reg);
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001076 __ cfi().AdjustCFAOffset(kX86WordSize);
1077 __ cfi().RelOffset(DWARFReg(reg), 0);
Mark Mendell5f874182015-03-04 15:42:45 -05001078 }
1079 }
1080
Mingyao Yang063fc772016-08-02 11:02:54 -07001081 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1082 // Initialize should_deoptimize flag to 0.
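    // Explanatory note: the flag is written just below the current stack pointer, inside
    // the region that the frame adjustment further down will cover, so no separate stack
    // adjustment is needed here.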
1083 __ movl(Address(ESP, -kShouldDeoptimizeFlagSize), Immediate(0));
1084 }
1085
David Srbeckyc6b4dd82015-04-07 20:32:43 +01001086 int adjust = GetFrameSize() - FrameEntrySpillSize();
1087 __ subl(ESP, Immediate(adjust));
1088 __ cfi().AdjustCFAOffset(adjust);
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001089  // Save the current method if we need it. Note that we do not save it as part
1090  // of HCurrentMethod, as that instruction might have been removed from the
1091  // SSA graph.
1092 if (RequiresCurrentMethod()) {
1093 __ movl(Address(ESP, kCurrentMethodStackOffset), kMethodRegisterArgument);
1094 }
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001095}
1096
1097void CodeGeneratorX86::GenerateFrameExit() {
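  // Explanatory note: RememberState/RestoreState bracket the epilogue so that the CFI
  // adjustments made while tearing down the frame do not leak into code emitted after the
  // ret; other blocks of this method still use the frame described by DefCFAOffset below.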
David Srbeckyc34dc932015-04-12 09:27:43 +01001098 __ cfi().RememberState();
1099 if (!HasEmptyFrame()) {
1100 int adjust = GetFrameSize() - FrameEntrySpillSize();
1101 __ addl(ESP, Immediate(adjust));
1102 __ cfi().AdjustCFAOffset(-adjust);
Mark Mendell5f874182015-03-04 15:42:45 -05001103
David Srbeckyc34dc932015-04-12 09:27:43 +01001104 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
1105 Register reg = kCoreCalleeSaves[i];
1106 if (allocated_registers_.ContainsCoreRegister(reg)) {
1107 __ popl(reg);
1108 __ cfi().AdjustCFAOffset(-static_cast<int>(kX86WordSize));
1109 __ cfi().Restore(DWARFReg(reg));
1110 }
Mark Mendell5f874182015-03-04 15:42:45 -05001111 }
Nicolas Geoffrayc0572a42015-02-06 14:35:25 +00001112 }
David Srbeckyc34dc932015-04-12 09:27:43 +01001113 __ ret();
1114 __ cfi().RestoreState();
1115 __ cfi().DefCFAOffset(GetFrameSize());
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001116}
1117
Nicolas Geoffray92a73ae2014-10-16 11:12:52 +01001118void CodeGeneratorX86::Bind(HBasicBlock* block) {
1119 __ Bind(GetLabelOf(block));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001120}
1121
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001122Location InvokeDexCallingConventionVisitorX86::GetReturnLocation(Primitive::Type type) const {
1123 switch (type) {
1124 case Primitive::kPrimBoolean:
1125 case Primitive::kPrimByte:
1126 case Primitive::kPrimChar:
1127 case Primitive::kPrimShort:
1128 case Primitive::kPrimInt:
1129 case Primitive::kPrimNot:
1130 return Location::RegisterLocation(EAX);
1131
1132 case Primitive::kPrimLong:
1133 return Location::RegisterPairLocation(EAX, EDX);
1134
1135 case Primitive::kPrimVoid:
1136 return Location::NoLocation();
1137
1138 case Primitive::kPrimDouble:
1139 case Primitive::kPrimFloat:
1140 return Location::FpuRegisterLocation(XMM0);
1141 }
Nicolas Geoffray0d1652e2015-06-03 12:12:19 +01001142
1143 UNREACHABLE();
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01001144}
1145
1146Location InvokeDexCallingConventionVisitorX86::GetMethodLocation() const {
1147 return Location::RegisterLocation(kMethodRegisterArgument);
1148}
1149
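// Explanatory note, rough shape of the argument-passing logic below: the first few integer
// and FP arguments are assigned the registers exposed by the calling_convention helper
// (longs taking a register pair), and anything beyond that spills to a stack slot.
// stack_index_ counts slots for every argument seen so far, which is why it advances even
// when the argument itself lands in a register.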
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001150Location InvokeDexCallingConventionVisitorX86::GetNextLocation(Primitive::Type type) {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001151 switch (type) {
1152 case Primitive::kPrimBoolean:
1153 case Primitive::kPrimByte:
1154 case Primitive::kPrimChar:
1155 case Primitive::kPrimShort:
1156 case Primitive::kPrimInt:
1157 case Primitive::kPrimNot: {
1158 uint32_t index = gp_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001159 stack_index_++;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001160 if (index < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001161 return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001162 } else {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001163 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001164 }
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001165 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001166
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001167 case Primitive::kPrimLong: {
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001168 uint32_t index = gp_index_;
1169 gp_index_ += 2;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001170 stack_index_ += 2;
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001171 if (index + 1 < calling_convention.GetNumberOfRegisters()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001172 X86ManagedRegister pair = X86ManagedRegister::FromRegisterPair(
1173 calling_convention.GetRegisterPairAt(index));
1174 return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001175 } else {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001176 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
1177 }
1178 }
1179
1180 case Primitive::kPrimFloat: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001181 uint32_t index = float_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001182 stack_index_++;
1183 if (index < calling_convention.GetNumberOfFpuRegisters()) {
1184 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
1185 } else {
1186 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
1187 }
1188 }
1189
1190 case Primitive::kPrimDouble: {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01001191 uint32_t index = float_index_++;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00001192 stack_index_ += 2;
1193 if (index < calling_convention.GetNumberOfFpuRegisters()) {
1194 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(index));
1195 } else {
1196 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001197 }
1198 }
1199
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001200 case Primitive::kPrimVoid:
1201 LOG(FATAL) << "Unexpected parameter type " << type;
1202 break;
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001203 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00001204 return Location::NoLocation();
Nicolas Geoffraya747a392014-04-17 14:56:23 +01001205}
Nicolas Geoffraydb928fc2014-04-16 17:38:32 +01001206
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001207void CodeGeneratorX86::Move32(Location destination, Location source) {
1208 if (source.Equals(destination)) {
1209 return;
1210 }
1211 if (destination.IsRegister()) {
1212 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001213 __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001214 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001215 __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001216 } else {
1217 DCHECK(source.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001218 __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001219 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001220 } else if (destination.IsFpuRegister()) {
1221 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001222 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001223 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001224 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001225 } else {
1226 DCHECK(source.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00001227 __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001228 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001229 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001230 DCHECK(destination.IsStackSlot()) << destination;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001231 if (source.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001232 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001233 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001234 __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Mark Mendell7c8d0092015-01-26 11:21:33 -05001235 } else if (source.IsConstant()) {
1236 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00001237 int32_t value = GetInt32ValueOf(constant);
Mark Mendell7c8d0092015-01-26 11:21:33 -05001238 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001239 } else {
1240 DCHECK(source.IsStackSlot());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01001241 __ pushl(Address(ESP, source.GetStackIndex()));
1242 __ popl(Address(ESP, destination.GetStackIndex()));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001243 }
1244 }
1245}
1246
1247void CodeGeneratorX86::Move64(Location destination, Location source) {
1248 if (source.Equals(destination)) {
1249 return;
1250 }
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001251 if (destination.IsRegisterPair()) {
1252 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001253 EmitParallelMoves(
1254 Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
1255 Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001256 Primitive::kPrimInt,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001257 Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001258 Location::RegisterLocation(destination.AsRegisterPairLow<Register>()),
1259 Primitive::kPrimInt);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001260 } else if (source.IsFpuRegister()) {
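      // Explanatory note: split the 64-bit XMM value into the destination register pair.
      // movd copies the low 32 bits, the logical right shift by 32 exposes the high bits
      // for the second movd. This sequence clobbers the source XMM register.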
Calin Juravlee460d1d2015-09-29 04:52:17 +01001261 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
1262 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
1263 __ psrlq(src_reg, Immediate(32));
1264 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001265 } else {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001266 // No conflict possible, so just do the moves.
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001267 DCHECK(source.IsDoubleStackSlot());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001268 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
1269 __ movl(destination.AsRegisterPairHigh<Register>(),
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001270 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
1271 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001272 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05001273 if (source.IsFpuRegister()) {
1274 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
1275 } else if (source.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001276 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Calin Juravlee460d1d2015-09-29 04:52:17 +01001277 } else if (source.IsRegisterPair()) {
1278 size_t elem_size = Primitive::ComponentSize(Primitive::kPrimInt);
1279 // Create stack space for 2 elements.
1280 __ subl(ESP, Immediate(2 * elem_size));
1281 __ movl(Address(ESP, 0), source.AsRegisterPairLow<Register>());
1282 __ movl(Address(ESP, elem_size), source.AsRegisterPairHigh<Register>());
1283 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
1284 // And remove the temporary stack space we allocated.
1285 __ addl(ESP, Immediate(2 * elem_size));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001286 } else {
1287 LOG(FATAL) << "Unimplemented";
1288 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001289 } else {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00001290 DCHECK(destination.IsDoubleStackSlot()) << destination;
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001291 if (source.IsRegisterPair()) {
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001292 // No conflict possible, so just do the moves.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001293 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001294 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01001295 source.AsRegisterPairHigh<Register>());
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01001296 } else if (source.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00001297 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001298 } else if (source.IsConstant()) {
1299 HConstant* constant = source.GetConstant();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001300 DCHECK(constant->IsLongConstant() || constant->IsDoubleConstant());
1301 int64_t value = GetInt64ValueOf(constant);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001302 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(Low32Bits(value)));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001303 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
1304 Immediate(High32Bits(value)));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001305 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00001306 DCHECK(source.IsDoubleStackSlot()) << source;
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001307 EmitParallelMoves(
1308 Location::StackSlot(source.GetStackIndex()),
1309 Location::StackSlot(destination.GetStackIndex()),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001310 Primitive::kPrimInt,
Nicolas Geoffray32b2a522014-11-27 14:54:18 +00001311 Location::StackSlot(source.GetHighStackIndex(kX86WordSize)),
Nicolas Geoffray90218252015-04-15 11:56:51 +01001312 Location::StackSlot(destination.GetHighStackIndex(kX86WordSize)),
1313 Primitive::kPrimInt);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01001314 }
1315 }
1316}
1317
Calin Juravle175dc732015-08-25 15:42:32 +01001318void CodeGeneratorX86::MoveConstant(Location location, int32_t value) {
1319 DCHECK(location.IsRegister());
1320 __ movl(location.AsRegister<Register>(), Immediate(value));
1321}
1322
Calin Juravlee460d1d2015-09-29 04:52:17 +01001323void CodeGeneratorX86::MoveLocation(Location dst, Location src, Primitive::Type dst_type) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001324 HParallelMove move(GetGraph()->GetArena());
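  // Explanatory note: a 64-bit value that is neither a constant nor in an XMM register
  // lives in two 32-bit halves, so it is split into two 32-bit moves; the parallel move
  // resolver then takes care of any overlap between the source and destination halves.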
1325 if (dst_type == Primitive::kPrimLong && !src.IsConstant() && !src.IsFpuRegister()) {
1326 move.AddMove(src.ToLow(), dst.ToLow(), Primitive::kPrimInt, nullptr);
1327 move.AddMove(src.ToHigh(), dst.ToHigh(), Primitive::kPrimInt, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001328 } else {
David Brazdil74eb1b22015-12-14 11:44:01 +00001329 move.AddMove(src, dst, dst_type, nullptr);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001330 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001331 GetMoveResolver()->EmitNativeCode(&move);
Calin Juravlee460d1d2015-09-29 04:52:17 +01001332}
1333
1334void CodeGeneratorX86::AddLocationAsTemp(Location location, LocationSummary* locations) {
1335 if (location.IsRegister()) {
1336 locations->AddTemp(location);
1337 } else if (location.IsRegisterPair()) {
1338 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1339 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
1340 } else {
1341 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1342 }
1343}
1344
David Brazdilfc6a86a2015-06-26 10:33:45 +00001345void InstructionCodeGeneratorX86::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001346 DCHECK(!successor->IsExitBlock());
1347
1348 HBasicBlock* block = got->GetBlock();
1349 HInstruction* previous = got->GetPrevious();
1350
1351 HLoopInformation* info = block->GetLoopInformation();
David Brazdil46e2a392015-03-16 17:31:52 +00001352 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01001353 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
1354 return;
1355 }
1356
1357 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
1358 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
1359 }
1360 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00001361 __ jmp(codegen_->GetLabelOf(successor));
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001362 }
1363}
1364
David Brazdilfc6a86a2015-06-26 10:33:45 +00001365void LocationsBuilderX86::VisitGoto(HGoto* got) {
1366 got->SetLocations(nullptr);
1367}
1368
1369void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
1370 HandleGoto(got, got->GetSuccessor());
1371}
1372
1373void LocationsBuilderX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1374 try_boundary->SetLocations(nullptr);
1375}
1376
1377void InstructionCodeGeneratorX86::VisitTryBoundary(HTryBoundary* try_boundary) {
1378 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
1379 if (!successor->IsExitBlock()) {
1380 HandleGoto(try_boundary, successor);
1381 }
1382}
1383
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001384void LocationsBuilderX86::VisitExit(HExit* exit) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00001385 exit->SetLocations(nullptr);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001386}
1387
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001388void InstructionCodeGeneratorX86::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00001389}
1390
Mark Mendell152408f2015-12-31 12:28:50 -05001391template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001392void InstructionCodeGeneratorX86::GenerateFPJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001393 LabelType* true_label,
1394 LabelType* false_label) {
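  // Explanatory note: ucomiss/ucomisd report an unordered result (at least one operand is
  // NaN) through the parity flag, so the NaN outcome is dispatched first via kUnordered
  // before the ordinary unsigned/FP condition is tested.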
Roland Levillain4fa13f62015-07-06 18:11:54 +01001395 if (cond->IsFPConditionTrueIfNaN()) {
1396 __ j(kUnordered, true_label);
1397 } else if (cond->IsFPConditionFalseIfNaN()) {
1398 __ j(kUnordered, false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001399 }
Roland Levillain4fa13f62015-07-06 18:11:54 +01001400 __ j(X86UnsignedOrFPCondition(cond->GetCondition()), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001401}
1402
Mark Mendell152408f2015-12-31 12:28:50 -05001403template<class LabelType>
Mark Mendellc4701932015-04-10 13:18:51 -04001404void InstructionCodeGeneratorX86::GenerateLongComparesAndJumps(HCondition* cond,
Mark Mendell152408f2015-12-31 12:28:50 -05001405 LabelType* true_label,
1406 LabelType* false_label) {
Mark Mendellc4701932015-04-10 13:18:51 -04001407 LocationSummary* locations = cond->GetLocations();
1408 Location left = locations->InAt(0);
1409 Location right = locations->InAt(1);
1410 IfCondition if_cond = cond->GetCondition();
1411
Mark Mendellc4701932015-04-10 13:18:51 -04001412 Register left_high = left.AsRegisterPairHigh<Register>();
Roland Levillain4fa13f62015-07-06 18:11:54 +01001413 Register left_low = left.AsRegisterPairLow<Register>();
Mark Mendellc4701932015-04-10 13:18:51 -04001414 IfCondition true_high_cond = if_cond;
1415 IfCondition false_high_cond = cond->GetOppositeCondition();
Aart Bike9f37602015-10-09 11:15:55 -07001416 Condition final_condition = X86UnsignedOrFPCondition(if_cond); // unsigned on lower part
Mark Mendellc4701932015-04-10 13:18:51 -04001417
1418 // Set the conditions for the test, remembering that == needs to be
1419 // decided using the low words.
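  // For example, a signed 64-bit a < b first compares the high words: if they differ, the
  // signed comparison of the high words already decides the result; only when they are
  // equal does the unsigned comparison of the low words decide, which is why
  // final_condition uses the unsigned/FP variant of the original condition.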
1420 switch (if_cond) {
1421 case kCondEQ:
Mark Mendellc4701932015-04-10 13:18:51 -04001422 case kCondNE:
Roland Levillain4fa13f62015-07-06 18:11:54 +01001423 // Nothing to do.
Mark Mendellc4701932015-04-10 13:18:51 -04001424 break;
1425 case kCondLT:
1426 false_high_cond = kCondGT;
Mark Mendellc4701932015-04-10 13:18:51 -04001427 break;
1428 case kCondLE:
1429 true_high_cond = kCondLT;
Mark Mendellc4701932015-04-10 13:18:51 -04001430 break;
1431 case kCondGT:
1432 false_high_cond = kCondLT;
Mark Mendellc4701932015-04-10 13:18:51 -04001433 break;
1434 case kCondGE:
1435 true_high_cond = kCondGT;
Mark Mendellc4701932015-04-10 13:18:51 -04001436 break;
Aart Bike9f37602015-10-09 11:15:55 -07001437 case kCondB:
1438 false_high_cond = kCondA;
1439 break;
1440 case kCondBE:
1441 true_high_cond = kCondB;
1442 break;
1443 case kCondA:
1444 false_high_cond = kCondB;
1445 break;
1446 case kCondAE:
1447 true_high_cond = kCondA;
1448 break;
Mark Mendellc4701932015-04-10 13:18:51 -04001449 }
1450
1451 if (right.IsConstant()) {
1452 int64_t value = right.GetConstant()->AsLongConstant()->GetValue();
Mark Mendellc4701932015-04-10 13:18:51 -04001453 int32_t val_high = High32Bits(value);
Roland Levillain4fa13f62015-07-06 18:11:54 +01001454 int32_t val_low = Low32Bits(value);
Mark Mendellc4701932015-04-10 13:18:51 -04001455
Aart Bika19616e2016-02-01 18:57:58 -08001456 codegen_->Compare32BitValue(left_high, val_high);
Mark Mendellc4701932015-04-10 13:18:51 -04001457 if (if_cond == kCondNE) {
Aart Bike9f37602015-10-09 11:15:55 -07001458 __ j(X86Condition(true_high_cond), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001459 } else if (if_cond == kCondEQ) {
Aart Bike9f37602015-10-09 11:15:55 -07001460 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001461 } else {
Aart Bike9f37602015-10-09 11:15:55 -07001462 __ j(X86Condition(true_high_cond), true_label);
1463 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001464 }
1465 // Must be equal high, so compare the lows.
Aart Bika19616e2016-02-01 18:57:58 -08001466 codegen_->Compare32BitValue(left_low, val_low);
Mark Mendell8659e842016-02-16 10:41:46 -05001467 } else if (right.IsRegisterPair()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001468 Register right_high = right.AsRegisterPairHigh<Register>();
Roland Levillain4fa13f62015-07-06 18:11:54 +01001469 Register right_low = right.AsRegisterPairLow<Register>();
Mark Mendellc4701932015-04-10 13:18:51 -04001470
1471 __ cmpl(left_high, right_high);
1472 if (if_cond == kCondNE) {
Aart Bike9f37602015-10-09 11:15:55 -07001473 __ j(X86Condition(true_high_cond), true_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001474 } else if (if_cond == kCondEQ) {
Aart Bike9f37602015-10-09 11:15:55 -07001475 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001476 } else {
Aart Bike9f37602015-10-09 11:15:55 -07001477 __ j(X86Condition(true_high_cond), true_label);
1478 __ j(X86Condition(false_high_cond), false_label);
Mark Mendellc4701932015-04-10 13:18:51 -04001479 }
1480 // Must be equal high, so compare the lows.
1481 __ cmpl(left_low, right_low);
Mark Mendell8659e842016-02-16 10:41:46 -05001482 } else {
1483 DCHECK(right.IsDoubleStackSlot());
1484 __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
1485 if (if_cond == kCondNE) {
1486 __ j(X86Condition(true_high_cond), true_label);
1487 } else if (if_cond == kCondEQ) {
1488 __ j(X86Condition(false_high_cond), false_label);
1489 } else {
1490 __ j(X86Condition(true_high_cond), true_label);
1491 __ j(X86Condition(false_high_cond), false_label);
1492 }
1493 // Must be equal high, so compare the lows.
1494 __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
Mark Mendellc4701932015-04-10 13:18:51 -04001495 }
1496 // The last comparison might be unsigned.
1497 __ j(final_condition, true_label);
1498}
1499
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001500void InstructionCodeGeneratorX86::GenerateFPCompare(Location lhs,
1501 Location rhs,
1502 HInstruction* insn,
1503 bool is_double) {
1504 HX86LoadFromConstantTable* const_area = insn->InputAt(1)->AsX86LoadFromConstantTable();
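  // Explanatory note: when the second input was folded into an HX86LoadFromConstantTable,
  // the constant is compared straight out of the constant area, addressed relative to the
  // method base register, instead of being loaded into an XMM register first.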
1505 if (is_double) {
1506 if (rhs.IsFpuRegister()) {
1507 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
1508 } else if (const_area != nullptr) {
1509 DCHECK(const_area->IsEmittedAtUseSite());
1510 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(),
1511 codegen_->LiteralDoubleAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00001512 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
1513 const_area->GetBaseMethodAddress(),
1514 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001515 } else {
1516 DCHECK(rhs.IsDoubleStackSlot());
1517 __ ucomisd(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
1518 }
1519 } else {
1520 if (rhs.IsFpuRegister()) {
1521 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), rhs.AsFpuRegister<XmmRegister>());
1522 } else if (const_area != nullptr) {
1523 DCHECK(const_area->IsEmittedAtUseSite());
1524 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(),
1525 codegen_->LiteralFloatAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00001526 const_area->GetConstant()->AsFloatConstant()->GetValue(),
1527 const_area->GetBaseMethodAddress(),
1528 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001529 } else {
1530 DCHECK(rhs.IsStackSlot());
1531 __ ucomiss(lhs.AsFpuRegister<XmmRegister>(), Address(ESP, rhs.GetStackIndex()));
1532 }
1533 }
1534}
1535
Mark Mendell152408f2015-12-31 12:28:50 -05001536template<class LabelType>
David Brazdil0debae72015-11-12 18:37:00 +00001537void InstructionCodeGeneratorX86::GenerateCompareTestAndBranch(HCondition* condition,
Mark Mendell152408f2015-12-31 12:28:50 -05001538 LabelType* true_target_in,
1539 LabelType* false_target_in) {
David Brazdil0debae72015-11-12 18:37:00 +00001540 // Generated branching requires both targets to be explicit. If either of the
1541 // targets is nullptr (fallthrough) use and bind `fallthrough_target` instead.
Mark Mendell152408f2015-12-31 12:28:50 -05001542 LabelType fallthrough_target;
1543 LabelType* true_target = true_target_in == nullptr ? &fallthrough_target : true_target_in;
1544 LabelType* false_target = false_target_in == nullptr ? &fallthrough_target : false_target_in;
David Brazdil0debae72015-11-12 18:37:00 +00001545
Mark Mendellc4701932015-04-10 13:18:51 -04001546 LocationSummary* locations = condition->GetLocations();
1547 Location left = locations->InAt(0);
1548 Location right = locations->InAt(1);
1549
Mark Mendellc4701932015-04-10 13:18:51 -04001550 Primitive::Type type = condition->InputAt(0)->GetType();
1551 switch (type) {
1552 case Primitive::kPrimLong:
1553 GenerateLongComparesAndJumps(condition, true_target, false_target);
1554 break;
1555 case Primitive::kPrimFloat:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001556 GenerateFPCompare(left, right, condition, false);
Mark Mendellc4701932015-04-10 13:18:51 -04001557 GenerateFPJumps(condition, true_target, false_target);
1558 break;
1559 case Primitive::kPrimDouble:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001560 GenerateFPCompare(left, right, condition, true);
Mark Mendellc4701932015-04-10 13:18:51 -04001561 GenerateFPJumps(condition, true_target, false_target);
1562 break;
1563 default:
1564 LOG(FATAL) << "Unexpected compare type " << type;
1565 }
1566
David Brazdil0debae72015-11-12 18:37:00 +00001567 if (false_target != &fallthrough_target) {
Mark Mendellc4701932015-04-10 13:18:51 -04001568 __ jmp(false_target);
1569 }
David Brazdil0debae72015-11-12 18:37:00 +00001570
1571 if (fallthrough_target.IsLinked()) {
1572 __ Bind(&fallthrough_target);
1573 }
Mark Mendellc4701932015-04-10 13:18:51 -04001574}
1575
David Brazdil0debae72015-11-12 18:37:00 +00001576static bool AreEflagsSetFrom(HInstruction* cond, HInstruction* branch) {
1577 // Moves may affect the eflags register (move zero uses xorl), so the EFLAGS
1578 // are set only strictly before `branch`. We can't use the eflags on long/FP
1579 // conditions if they are materialized due to the complex branching.
1580 return cond->IsCondition() &&
1581 cond->GetNext() == branch &&
1582 cond->InputAt(0)->GetType() != Primitive::kPrimLong &&
1583 !Primitive::IsFloatingPointType(cond->InputAt(0)->GetType());
1584}
1585
Mark Mendell152408f2015-12-31 12:28:50 -05001586template<class LabelType>
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001587void InstructionCodeGeneratorX86::GenerateTestAndBranch(HInstruction* instruction,
David Brazdil0debae72015-11-12 18:37:00 +00001588 size_t condition_input_index,
Mark Mendell152408f2015-12-31 12:28:50 -05001589 LabelType* true_target,
1590 LabelType* false_target) {
David Brazdil0debae72015-11-12 18:37:00 +00001591 HInstruction* cond = instruction->InputAt(condition_input_index);
1592
1593 if (true_target == nullptr && false_target == nullptr) {
1594 // Nothing to do. The code always falls through.
1595 return;
1596 } else if (cond->IsIntConstant()) {
Roland Levillain1a653882016-03-18 18:05:57 +00001597 // Constant condition, statically compared against "true" (integer value 1).
1598 if (cond->AsIntConstant()->IsTrue()) {
David Brazdil0debae72015-11-12 18:37:00 +00001599 if (true_target != nullptr) {
1600 __ jmp(true_target);
Nicolas Geoffray18efde52014-09-22 15:51:11 +01001601 }
Nicolas Geoffray360231a2014-10-08 21:07:48 +01001602 } else {
Roland Levillain1a653882016-03-18 18:05:57 +00001603 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
David Brazdil0debae72015-11-12 18:37:00 +00001604 if (false_target != nullptr) {
1605 __ jmp(false_target);
1606 }
1607 }
1608 return;
1609 }
1610
1611 // The following code generates these patterns:
1612 // (1) true_target == nullptr && false_target != nullptr
1613 // - opposite condition true => branch to false_target
1614 // (2) true_target != nullptr && false_target == nullptr
1615 // - condition true => branch to true_target
1616 // (3) true_target != nullptr && false_target != nullptr
1617 // - condition true => branch to true_target
1618 // - branch to false_target
1619 if (IsBooleanValueOrMaterializedCondition(cond)) {
1620 if (AreEflagsSetFrom(cond, instruction)) {
1621 if (true_target == nullptr) {
1622 __ j(X86Condition(cond->AsCondition()->GetOppositeCondition()), false_target);
1623 } else {
1624 __ j(X86Condition(cond->AsCondition()->GetCondition()), true_target);
1625 }
1626 } else {
1627 // Materialized condition, compare against 0.
1628 Location lhs = instruction->GetLocations()->InAt(condition_input_index);
1629 if (lhs.IsRegister()) {
1630 __ testl(lhs.AsRegister<Register>(), lhs.AsRegister<Register>());
1631 } else {
1632 __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
1633 }
1634 if (true_target == nullptr) {
1635 __ j(kEqual, false_target);
1636 } else {
1637 __ j(kNotEqual, true_target);
1638 }
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01001639 }
1640 } else {
David Brazdil0debae72015-11-12 18:37:00 +00001641 // Condition has not been materialized, use its inputs as the comparison and
1642 // its condition as the branch condition.
Mark Mendellb8b97692015-05-22 16:58:19 -04001643 HCondition* condition = cond->AsCondition();
David Brazdil0debae72015-11-12 18:37:00 +00001644
1645 // If this is a long or FP comparison that has been folded into
1646 // the HCondition, generate the comparison directly.
1647 Primitive::Type type = condition->InputAt(0)->GetType();
1648 if (type == Primitive::kPrimLong || Primitive::IsFloatingPointType(type)) {
1649 GenerateCompareTestAndBranch(condition, true_target, false_target);
1650 return;
1651 }
1652
1653 Location lhs = condition->GetLocations()->InAt(0);
1654 Location rhs = condition->GetLocations()->InAt(1);
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001655 // LHS is guaranteed to be in a register (see LocationsBuilderX86::HandleCondition).
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01001656 codegen_->GenerateIntCompare(lhs, rhs);
David Brazdil0debae72015-11-12 18:37:00 +00001657 if (true_target == nullptr) {
1658 __ j(X86Condition(condition->GetOppositeCondition()), false_target);
1659 } else {
Mark Mendellb8b97692015-05-22 16:58:19 -04001660 __ j(X86Condition(condition->GetCondition()), true_target);
Dave Allison20dfc792014-06-16 20:44:29 -07001661 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001662 }
David Brazdil0debae72015-11-12 18:37:00 +00001663
1664 // If neither branch falls through (case 3), the conditional branch to `true_target`
1665 // was already emitted (case 2) and we need to emit a jump to `false_target`.
1666 if (true_target != nullptr && false_target != nullptr) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001667 __ jmp(false_target);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001668 }
1669}
1670
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001671void LocationsBuilderX86::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001672 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
1673 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001674 locations->SetInAt(0, Location::Any());
1675 }
1676}
1677
1678void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00001679 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
1680 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
1681 Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
1682 nullptr : codegen_->GetLabelOf(true_successor);
1683 Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
1684 nullptr : codegen_->GetLabelOf(false_successor);
1685 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001686}
1687
1688void LocationsBuilderX86::VisitDeoptimize(HDeoptimize* deoptimize) {
1689 LocationSummary* locations = new (GetGraph()->GetArena())
1690 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01001691 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
David Brazdil0debae72015-11-12 18:37:00 +00001692 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001693 locations->SetInAt(0, Location::Any());
1694 }
1695}
1696
1697void InstructionCodeGeneratorX86::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08001698 SlowPathCode* slow_path = deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathX86>(deoptimize);
David Brazdil74eb1b22015-12-14 11:44:01 +00001699 GenerateTestAndBranch<Label>(deoptimize,
1700 /* condition_input_index */ 0,
1701 slow_path->GetEntryLabel(),
1702 /* false_target */ nullptr);
1703}
1704
Mingyao Yang063fc772016-08-02 11:02:54 -07001705void LocationsBuilderX86::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1706 LocationSummary* locations = new (GetGraph()->GetArena())
1707 LocationSummary(flag, LocationSummary::kNoCall);
1708 locations->SetOut(Location::RequiresRegister());
1709}
1710
1711void InstructionCodeGeneratorX86::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
1712 __ movl(flag->GetLocations()->Out().AsRegister<Register>(),
1713 Address(ESP, codegen_->GetStackOffsetOfShouldDeoptimizeFlag()));
1714}
1715
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001716static bool SelectCanUseCMOV(HSelect* select) {
1717 // There are no conditional move instructions for XMMs.
1718 if (Primitive::IsFloatingPointType(select->GetType())) {
1719 return false;
1720 }
1721
1722 // A FP condition doesn't generate the single CC that we need.
1723 // In 32 bit mode, a long condition doesn't generate a single CC either.
1724 HInstruction* condition = select->GetCondition();
1725 if (condition->IsCondition()) {
1726 Primitive::Type compare_type = condition->InputAt(0)->GetType();
1727 if (compare_type == Primitive::kPrimLong ||
1728 Primitive::IsFloatingPointType(compare_type)) {
1729 return false;
1730 }
1731 }
1732
1733 // We can generate a CMOV for this Select.
1734 return true;
1735}
1736
David Brazdil74eb1b22015-12-14 11:44:01 +00001737void LocationsBuilderX86::VisitSelect(HSelect* select) {
1738 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001739 if (Primitive::IsFloatingPointType(select->GetType())) {
David Brazdil74eb1b22015-12-14 11:44:01 +00001740 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001741 locations->SetInAt(1, Location::Any());
David Brazdil74eb1b22015-12-14 11:44:01 +00001742 } else {
1743 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001744 if (SelectCanUseCMOV(select)) {
1745 if (select->InputAt(1)->IsConstant()) {
1746 // Cmov can't handle a constant value.
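        // (cmov only accepts a register or memory source operand, so a constant second
        // input has to be materialized into a register here.)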
1747 locations->SetInAt(1, Location::RequiresRegister());
1748 } else {
1749 locations->SetInAt(1, Location::Any());
1750 }
1751 } else {
1752 locations->SetInAt(1, Location::Any());
1753 }
David Brazdil74eb1b22015-12-14 11:44:01 +00001754 }
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001755 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
1756 locations->SetInAt(2, Location::RequiresRegister());
David Brazdil74eb1b22015-12-14 11:44:01 +00001757 }
1758 locations->SetOut(Location::SameAsFirstInput());
1759}
1760
1761void InstructionCodeGeneratorX86::VisitSelect(HSelect* select) {
1762 LocationSummary* locations = select->GetLocations();
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001763 DCHECK(locations->InAt(0).Equals(locations->Out()));
1764 if (SelectCanUseCMOV(select)) {
1765 // If both the condition and the source types are integer, we can generate
1766 // a CMOV to implement Select.
1767
1768 HInstruction* select_condition = select->GetCondition();
1769 Condition cond = kNotEqual;
1770
1771 // Figure out how to test the 'condition'.
1772 if (select_condition->IsCondition()) {
1773 HCondition* condition = select_condition->AsCondition();
1774 if (!condition->IsEmittedAtUseSite()) {
1775 // This was a previously materialized condition.
1776 // Can we use the existing condition code?
1777 if (AreEflagsSetFrom(condition, select)) {
1778 // Materialization was the previous instruction. Condition codes are right.
1779 cond = X86Condition(condition->GetCondition());
1780 } else {
1781 // No, we have to recreate the condition code.
1782 Register cond_reg = locations->InAt(2).AsRegister<Register>();
1783 __ testl(cond_reg, cond_reg);
1784 }
1785 } else {
1786 // We can't handle FP or long here.
1787 DCHECK_NE(condition->InputAt(0)->GetType(), Primitive::kPrimLong);
1788 DCHECK(!Primitive::IsFloatingPointType(condition->InputAt(0)->GetType()));
1789 LocationSummary* cond_locations = condition->GetLocations();
Roland Levillain0b671c02016-08-19 12:02:34 +01001790 codegen_->GenerateIntCompare(cond_locations->InAt(0), cond_locations->InAt(1));
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001791 cond = X86Condition(condition->GetCondition());
1792 }
1793 } else {
Roland Levillain5e8d5f02016-10-18 18:03:43 +01001794 // Must be a Boolean condition, which needs to be compared to 0.
Mark Mendell0c5b18e2016-02-06 13:58:35 -05001795 Register cond_reg = locations->InAt(2).AsRegister<Register>();
1796 __ testl(cond_reg, cond_reg);
1797 }
1798
1799 // If the condition is true, overwrite the output, which already contains false.
1800 Location false_loc = locations->InAt(0);
1801 Location true_loc = locations->InAt(1);
1802 if (select->GetType() == Primitive::kPrimLong) {
1803 // 64 bit conditional move.
1804 Register false_high = false_loc.AsRegisterPairHigh<Register>();
1805 Register false_low = false_loc.AsRegisterPairLow<Register>();
1806 if (true_loc.IsRegisterPair()) {
1807 __ cmovl(cond, false_high, true_loc.AsRegisterPairHigh<Register>());
1808 __ cmovl(cond, false_low, true_loc.AsRegisterPairLow<Register>());
1809 } else {
1810 __ cmovl(cond, false_high, Address(ESP, true_loc.GetHighStackIndex(kX86WordSize)));
1811 __ cmovl(cond, false_low, Address(ESP, true_loc.GetStackIndex()));
1812 }
1813 } else {
1814 // 32 bit conditional move.
1815 Register false_reg = false_loc.AsRegister<Register>();
1816 if (true_loc.IsRegister()) {
1817 __ cmovl(cond, false_reg, true_loc.AsRegister<Register>());
1818 } else {
1819 __ cmovl(cond, false_reg, Address(ESP, true_loc.GetStackIndex()));
1820 }
1821 }
1822 } else {
1823 NearLabel false_target;
1824 GenerateTestAndBranch<NearLabel>(
1825 select, /* condition_input_index */ 2, /* true_target */ nullptr, &false_target);
1826 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
1827 __ Bind(&false_target);
1828 }
Mingyao Yangd43b3ac2015-04-01 14:03:04 -07001829}
1830
David Srbecky0cf44932015-12-09 14:09:59 +00001831void LocationsBuilderX86::VisitNativeDebugInfo(HNativeDebugInfo* info) {
1832 new (GetGraph()->GetArena()) LocationSummary(info);
1833}
1834
David Srbeckyd28f4a02016-03-14 17:14:24 +00001835void InstructionCodeGeneratorX86::VisitNativeDebugInfo(HNativeDebugInfo*) {
1836 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00001837}
1838
1839void CodeGeneratorX86::GenerateNop() {
1840 __ nop();
David Srbecky0cf44932015-12-09 14:09:59 +00001841}
1842
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001843void LocationsBuilderX86::HandleCondition(HCondition* cond) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01001844 LocationSummary* locations =
Roland Levillain0d37cd02015-05-27 16:39:19 +01001845 new (GetGraph()->GetArena()) LocationSummary(cond, LocationSummary::kNoCall);
Mark Mendellc4701932015-04-10 13:18:51 -04001846 // Handle the long/FP comparisons made in instruction simplification.
1847 switch (cond->InputAt(0)->GetType()) {
1848 case Primitive::kPrimLong: {
1849 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendell8659e842016-02-16 10:41:46 -05001850 locations->SetInAt(1, Location::Any());
David Brazdilb3e773e2016-01-26 11:28:37 +00001851 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001852 locations->SetOut(Location::RequiresRegister());
1853 }
1854 break;
1855 }
1856 case Primitive::kPrimFloat:
1857 case Primitive::kPrimDouble: {
1858 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001859 if (cond->InputAt(1)->IsX86LoadFromConstantTable()) {
1860 DCHECK(cond->InputAt(1)->IsEmittedAtUseSite());
1861 } else if (cond->InputAt(1)->IsConstant()) {
1862 locations->SetInAt(1, Location::RequiresFpuRegister());
1863 } else {
1864 locations->SetInAt(1, Location::Any());
1865 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001866 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001867 locations->SetOut(Location::RequiresRegister());
1868 }
1869 break;
1870 }
1871 default:
1872 locations->SetInAt(0, Location::RequiresRegister());
1873 locations->SetInAt(1, Location::Any());
David Brazdilb3e773e2016-01-26 11:28:37 +00001874 if (!cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001875 // We need a byte register.
1876 locations->SetOut(Location::RegisterLocation(ECX));
1877 }
1878 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01001879 }
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00001880}
1881
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001882void InstructionCodeGeneratorX86::HandleCondition(HCondition* cond) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001883 if (cond->IsEmittedAtUseSite()) {
Mark Mendellc4701932015-04-10 13:18:51 -04001884 return;
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01001885 }
Mark Mendellc4701932015-04-10 13:18:51 -04001886
1887 LocationSummary* locations = cond->GetLocations();
1888 Location lhs = locations->InAt(0);
1889 Location rhs = locations->InAt(1);
1890 Register reg = locations->Out().AsRegister<Register>();
Mark Mendell152408f2015-12-31 12:28:50 -05001891 NearLabel true_label, false_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001892
1893 switch (cond->InputAt(0)->GetType()) {
1894 default: {
1895 // Integer case.
1896
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01001897 // Clear output register: setb only sets the low byte.
Mark Mendellc4701932015-04-10 13:18:51 -04001898 __ xorl(reg, reg);
Roland Levillain0b671c02016-08-19 12:02:34 +01001899 codegen_->GenerateIntCompare(lhs, rhs);
Aart Bike9f37602015-10-09 11:15:55 -07001900 __ setb(X86Condition(cond->GetCondition()), reg);
Mark Mendellc4701932015-04-10 13:18:51 -04001901 return;
1902 }
1903 case Primitive::kPrimLong:
1904 GenerateLongComparesAndJumps(cond, &true_label, &false_label);
1905 break;
1906 case Primitive::kPrimFloat:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001907 GenerateFPCompare(lhs, rhs, cond, false);
Mark Mendellc4701932015-04-10 13:18:51 -04001908 GenerateFPJumps(cond, &true_label, &false_label);
1909 break;
1910 case Primitive::kPrimDouble:
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00001911 GenerateFPCompare(lhs, rhs, cond, true);
Mark Mendellc4701932015-04-10 13:18:51 -04001912 GenerateFPJumps(cond, &true_label, &false_label);
1913 break;
1914 }
1915
1916 // Convert the jumps into the result.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001917 NearLabel done_label;
Mark Mendellc4701932015-04-10 13:18:51 -04001918
Roland Levillain4fa13f62015-07-06 18:11:54 +01001919 // False case: result = 0.
Mark Mendellc4701932015-04-10 13:18:51 -04001920 __ Bind(&false_label);
1921 __ xorl(reg, reg);
1922 __ jmp(&done_label);
1923
Roland Levillain4fa13f62015-07-06 18:11:54 +01001924 // True case: result = 1.
Mark Mendellc4701932015-04-10 13:18:51 -04001925 __ Bind(&true_label);
1926 __ movl(reg, Immediate(1));
1927 __ Bind(&done_label);
Dave Allison20dfc792014-06-16 20:44:29 -07001928}
1929
1930void LocationsBuilderX86::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001931 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001932}
1933
1934void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001935 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001936}
1937
1938void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001939 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001940}
1941
1942void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001943 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001944}
1945
1946void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001947 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001948}
1949
1950void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001951 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001952}
1953
1954void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001955 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001956}
1957
1958void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001959 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001960}
1961
1962void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001963 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001964}
1965
1966void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001967 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001968}
1969
1970void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001971 HandleCondition(comp);
Dave Allison20dfc792014-06-16 20:44:29 -07001972}
1973
1974void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001975 HandleCondition(comp);
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00001976}
1977
Aart Bike9f37602015-10-09 11:15:55 -07001978void LocationsBuilderX86::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001979 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001980}
1981
1982void InstructionCodeGeneratorX86::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001983 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001984}
1985
1986void LocationsBuilderX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001987 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001988}
1989
1990void InstructionCodeGeneratorX86::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001991 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001992}
1993
1994void LocationsBuilderX86::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001995 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07001996}
1997
1998void InstructionCodeGeneratorX86::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001999 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002000}
2001
2002void LocationsBuilderX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002003 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002004}
2005
2006void InstructionCodeGeneratorX86::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002007 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07002008}
2009
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002010void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002011 LocationSummary* locations =
2012 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002013 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00002014}
2015
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002016void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
Roland Levillain3a3fd0f2014-10-10 13:56:31 +01002017 // Will be generated at use site.
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002018}
2019
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002020void LocationsBuilderX86::VisitNullConstant(HNullConstant* constant) {
2021 LocationSummary* locations =
2022 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2023 locations->SetOut(Location::ConstantLocation(constant));
2024}
2025
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002026void InstructionCodeGeneratorX86::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002027 // Will be generated at use site.
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00002028}
2029
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002030void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002031 LocationSummary* locations =
2032 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01002033 locations->SetOut(Location::ConstantLocation(constant));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002034}
2035
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002036void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002037 // Will be generated at use site.
2038}
2039
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002040void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
2041 LocationSummary* locations =
2042 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2043 locations->SetOut(Location::ConstantLocation(constant));
2044}
2045
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002046void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002047 // Will be generated at use site.
2048}
2049
2050void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
2051 LocationSummary* locations =
2052 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2053 locations->SetOut(Location::ConstantLocation(constant));
2054}
2055
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002056void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant ATTRIBUTE_UNUSED) {
Nicolas Geoffray102cbed2014-10-15 18:31:05 +01002057 // Will be generated at use site.
2058}
2059
Igor Murashkind01745e2017-04-05 16:40:31 -07002060void LocationsBuilderX86::VisitConstructorFence(HConstructorFence* constructor_fence) {
2061 constructor_fence->SetLocations(nullptr);
2062}
2063
2064void InstructionCodeGeneratorX86::VisitConstructorFence(
2065 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
2066 codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
2067}
2068
Calin Juravle27df7582015-04-17 19:12:31 +01002069void LocationsBuilderX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
2070 memory_barrier->SetLocations(nullptr);
2071}
2072
2073void InstructionCodeGeneratorX86::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002074 codegen_->GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
Calin Juravle27df7582015-04-17 19:12:31 +01002075}
2076
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002077void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002078 ret->SetLocations(nullptr);
Nicolas Geoffray3ff386a2014-03-04 14:46:47 +00002079}
2080
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01002081void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002082 codegen_->GenerateFrameExit();
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00002083}
2084
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002085void LocationsBuilderX86::VisitReturn(HReturn* ret) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002086 LocationSummary* locations =
2087 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002088 switch (ret->InputAt(0)->GetType()) {
2089 case Primitive::kPrimBoolean:
2090 case Primitive::kPrimByte:
2091 case Primitive::kPrimChar:
2092 case Primitive::kPrimShort:
2093 case Primitive::kPrimInt:
2094 case Primitive::kPrimNot:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002095 locations->SetInAt(0, Location::RegisterLocation(EAX));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002096 break;
2097
2098 case Primitive::kPrimLong:
2099 locations->SetInAt(
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002100 0, Location::RegisterPairLocation(EAX, EDX));
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002101 break;
2102
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002103 case Primitive::kPrimFloat:
2104 case Primitive::kPrimDouble:
2105 locations->SetInAt(
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002106 0, Location::FpuRegisterLocation(XMM0));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002107 break;
2108
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002109 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002110 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002111 }
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002112}
2113
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002114void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002115 if (kIsDebugBuild) {
2116 switch (ret->InputAt(0)->GetType()) {
2117 case Primitive::kPrimBoolean:
2118 case Primitive::kPrimByte:
2119 case Primitive::kPrimChar:
2120 case Primitive::kPrimShort:
2121 case Primitive::kPrimInt:
2122 case Primitive::kPrimNot:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002123 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<Register>(), EAX);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002124 break;
2125
2126 case Primitive::kPrimLong:
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01002127 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), EAX);
2128 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), EDX);
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002129 break;
2130
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002131 case Primitive::kPrimFloat:
2132 case Primitive::kPrimDouble:
Roland Levillain271ab9c2014-11-27 15:23:57 +00002133 DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>(), XMM0);
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002134 break;
2135
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002136 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002137 LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002138 }
2139 }
Nicolas Geoffray787c3072014-03-17 10:20:19 +00002140 codegen_->GenerateFrameExit();
Nicolas Geoffraybab4ed72014-03-11 17:53:17 +00002141}
2142
Calin Juravle175dc732015-08-25 15:42:32 +01002143void LocationsBuilderX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2144  // The trampoline uses the same calling convention as a regular dex call,
2145  // except that instead of loading arg0/r0 with the target Method*, arg0/r0
2146  // will contain the method_idx.
2147 HandleInvoke(invoke);
2148}
2149
2150void InstructionCodeGeneratorX86::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
2151 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
2152}
2153
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002154void LocationsBuilderX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002155 // Explicit clinit checks triggered by static invokes must have been pruned by
2156 // art::PrepareForRegisterAllocation.
2157 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002158
Mark Mendellfb8d2792015-03-31 22:16:59 -04002159 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002160 if (intrinsic.TryDispatch(invoke)) {
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002161 if (invoke->GetLocations()->CanCall() && invoke->HasPcRelativeDexCache()) {
Vladimir Markoc53c0792015-11-19 15:48:33 +00002162 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002163 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04002164 return;
2165 }
2166
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002167 HandleInvoke(invoke);
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002168
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002169 // For PC-relative dex cache the invoke has an extra input, the PC-relative address base.
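  // Editor's note (assumption): x86-32 has no PC-relative addressing mode, so
  // the "PC-relative" base is an ordinary register materialized by an earlier
  // instruction (an HX86ComputeBaseMethodAddress-style node, typically via a
  // call/pop sequence); the extra input keeps that register live here.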
2170 if (invoke->HasPcRelativeDexCache()) {
Vladimir Markob4536b72015-11-24 13:45:23 +00002171 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00002172 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002173}
2174
Mark Mendell09ed1a32015-03-25 08:30:06 -04002175static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorX86* codegen) {
2176 if (invoke->GetLocations()->Intrinsified()) {
2177 IntrinsicCodeGeneratorX86 intrinsic(codegen);
2178 intrinsic.Dispatch(invoke);
2179 return true;
2180 }
2181 return false;
2182}
2183
Nicolas Geoffraye53798a2014-12-01 10:31:54 +00002184void InstructionCodeGeneratorX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002185 // Explicit clinit checks triggered by static invokes must have been pruned by
2186 // art::PrepareForRegisterAllocation.
2187 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Roland Levillain4c0eb422015-04-24 16:43:49 +01002188
Mark Mendell09ed1a32015-03-25 08:30:06 -04002189 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2190 return;
Nicolas Geoffray1cf95282014-12-12 19:22:03 +00002191 }
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002192
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002193 LocationSummary* locations = invoke->GetLocations();
Mark Mendell09ed1a32015-03-25 08:30:06 -04002194 codegen_->GenerateStaticOrDirectCall(
Nicolas Geoffray94015b92015-06-04 18:21:04 +01002195 invoke, locations->HasTemps() ? locations->GetTemp(0) : Location::NoLocation());
Mingyao Yang8693fe12015-04-17 16:51:08 -07002196 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002197}
2198
2199void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00002200 IntrinsicLocationsBuilderX86 intrinsic(codegen_);
2201 if (intrinsic.TryDispatch(invoke)) {
2202 return;
2203 }
2204
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002205 HandleInvoke(invoke);
2206}
2207
2208void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +01002209 InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
Nicolas Geoffrayfd88f162015-06-03 11:23:52 +01002210 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002211}
2212
Nicolas Geoffraye982f0b2014-08-13 02:11:24 +01002213void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002214 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
2215 return;
2216 }
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002217
Andreas Gampebfb5ba92015-09-01 15:45:02 +00002218 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Nicolas Geoffrayf12feb82014-07-17 18:32:41 +01002219 DCHECK(!codegen_->IsLeafMethod());
Nicolas Geoffray39468442014-09-02 15:17:15 +01002220 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
Nicolas Geoffray8ccc3f52014-03-19 10:34:11 +00002221}
2222
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002223void LocationsBuilderX86::VisitInvokeInterface(HInvokeInterface* invoke) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002224  // This call to HandleInvoke allocates a temporary (core) register
2225  // which is also used to transfer the hidden argument into the FP
2226  // register XMM7 (see the code generator below).
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002227 HandleInvoke(invoke);
2228 // Add the hidden argument.
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002229 invoke->GetLocations()->AddTemp(Location::FpuRegisterLocation(XMM7));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002230}
2231
2232void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) {
2233 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
Roland Levillain0d5a2812015-11-13 10:07:31 +00002234 LocationSummary* locations = invoke->GetLocations();
2235 Register temp = locations->GetTemp(0).AsRegister<Register>();
2236 XmmRegister hidden_reg = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002237 Location receiver = locations->InAt(0);
2238 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2239
Roland Levillain0d5a2812015-11-13 10:07:31 +00002240  // Set the hidden argument. It is safe to do this here, as XMM7
2241  // won't be modified again before the `call` instruction.
2242 DCHECK_EQ(XMM7, hidden_reg);
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002243 __ movl(temp, Immediate(invoke->GetDexMethodIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002244 __ movd(hidden_reg, temp);
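  // Editor's note (assumption): the hidden argument is the interface method's
  // dex method index. Passing it in XMM7 keeps every core register available
  // for the regular calling convention, and the IMT conflict resolution stub
  // can read it back when several interface methods share one IMT slot.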
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002245
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002246 if (receiver.IsStackSlot()) {
2247 __ movl(temp, Address(ESP, receiver.GetStackIndex()));
Roland Levillain0d5a2812015-11-13 10:07:31 +00002248 // /* HeapReference<Class> */ temp = temp->klass_
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002249 __ movl(temp, Address(temp, class_offset));
2250 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00002251 // /* HeapReference<Class> */ temp = receiver->klass_
Roland Levillain271ab9c2014-11-27 15:23:57 +00002252 __ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002253 }
Roland Levillain4d027112015-07-01 15:41:14 +01002254 codegen_->MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00002255  // Instead of simply (possibly) unpoisoning `temp` here, we should
2256  // emit a read barrier for the previous class reference load.
2257  // However, this is not required in practice, as this is an
2258  // intermediate/temporary reference and because the current
2259  // concurrent copying collector keeps the from-space memory
2260  // intact/accessible until the end of the marking phase (future
2261  // collectors may not).
Roland Levillain4d027112015-07-01 15:41:14 +01002262 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002263 // temp = temp->GetAddressOfIMT()
2264 __ movl(temp,
2265 Address(temp, mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002266 // temp = temp->GetImtEntryAt(method_offset);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00002267 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00002268 invoke->GetImtIndex(), kX86PointerSize));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002269 __ movl(temp, Address(temp, method_offset));
2270 // call temp->GetEntryPoint();
Roland Levillain0d5a2812015-11-13 10:07:31 +00002271 __ call(Address(temp,
Andreas Gampe542451c2016-07-26 09:02:02 -07002272 ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
Nicolas Geoffray52839d12014-11-07 17:47:25 +00002273
2274 DCHECK(!codegen_->IsLeafMethod());
2275 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
2276}
2277
Orion Hodsonac141392017-01-13 11:53:47 +00002278void LocationsBuilderX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2279 HandleInvoke(invoke);
2280}
2281
2282void InstructionCodeGeneratorX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
2283 codegen_->GenerateInvokePolymorphicCall(invoke);
2284}
2285
Roland Levillain88cb1752014-10-20 16:36:47 +01002286void LocationsBuilderX86::VisitNeg(HNeg* neg) {
2287 LocationSummary* locations =
2288 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2289 switch (neg->GetResultType()) {
2290 case Primitive::kPrimInt:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002291 case Primitive::kPrimLong:
Roland Levillain88cb1752014-10-20 16:36:47 +01002292 locations->SetInAt(0, Location::RequiresRegister());
2293 locations->SetOut(Location::SameAsFirstInput());
2294 break;
2295
Roland Levillain88cb1752014-10-20 16:36:47 +01002296 case Primitive::kPrimFloat:
Roland Levillain5368c212014-11-27 15:03:41 +00002297 locations->SetInAt(0, Location::RequiresFpuRegister());
2298 locations->SetOut(Location::SameAsFirstInput());
2299 locations->AddTemp(Location::RequiresRegister());
2300 locations->AddTemp(Location::RequiresFpuRegister());
2301 break;
2302
Roland Levillain88cb1752014-10-20 16:36:47 +01002303 case Primitive::kPrimDouble:
Roland Levillain3dbcb382014-10-28 17:30:07 +00002304 locations->SetInAt(0, Location::RequiresFpuRegister());
Roland Levillain5368c212014-11-27 15:03:41 +00002305 locations->SetOut(Location::SameAsFirstInput());
2306 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain88cb1752014-10-20 16:36:47 +01002307 break;
2308
2309 default:
2310 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2311 }
2312}
2313
2314void InstructionCodeGeneratorX86::VisitNeg(HNeg* neg) {
2315 LocationSummary* locations = neg->GetLocations();
2316 Location out = locations->Out();
2317 Location in = locations->InAt(0);
2318 switch (neg->GetResultType()) {
2319 case Primitive::kPrimInt:
2320 DCHECK(in.IsRegister());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002321 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002322 __ negl(out.AsRegister<Register>());
Roland Levillain88cb1752014-10-20 16:36:47 +01002323 break;
2324
2325 case Primitive::kPrimLong:
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002326 DCHECK(in.IsRegisterPair());
Roland Levillain3dbcb382014-10-28 17:30:07 +00002327 DCHECK(in.Equals(out));
Roland Levillain2e07b4f2014-10-23 18:12:09 +01002328 __ negl(out.AsRegisterPairLow<Register>());
2329      // Negation is similar to subtraction from zero. The low 32 bits
2330      // (the register pair's low register) trigger a borrow when they
2331      // are different from zero; to take this into account, add 1 to
2332      // the high 32 bits if the carry flag (CF) is set to 1 after the
2333      // first NEGL operation.
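      // Illustrative example (editor's addition): negating the 64-bit value
      // 0x00000000'00000001 proceeds as
      //   low  = -0x00000001        = 0xFFFFFFFF, CF = 1
      //   high = -(0x00000000 + CF) = 0xFFFFFFFF
      // giving 0xFFFFFFFF'FFFFFFFF, i.e. -1, as expected.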
2334 __ adcl(out.AsRegisterPairHigh<Register>(), Immediate(0));
2335 __ negl(out.AsRegisterPairHigh<Register>());
2336 break;
2337
Roland Levillain5368c212014-11-27 15:03:41 +00002338 case Primitive::kPrimFloat: {
2339 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002340 Register constant = locations->GetTemp(0).AsRegister<Register>();
2341 XmmRegister mask = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002342 // Implement float negation with an exclusive or with value
2343 // 0x80000000 (mask for bit 31, representing the sign of a
2344 // single-precision floating-point number).
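      // Illustrative example (editor's addition): 1.0f is 0x3F800000, and
      // 0x3F800000 ^ 0x80000000 = 0xBF800000, which is -1.0f. Flipping the
      // sign bit also handles +/-0.0f and NaN correctly, which a subtraction
      // from zero would not.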
2345 __ movl(constant, Immediate(INT32_C(0x80000000)));
2346 __ movd(mask, constant);
Roland Levillain271ab9c2014-11-27 15:23:57 +00002347 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain3dbcb382014-10-28 17:30:07 +00002348 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002349 }
Roland Levillain3dbcb382014-10-28 17:30:07 +00002350
Roland Levillain5368c212014-11-27 15:03:41 +00002351 case Primitive::kPrimDouble: {
2352 DCHECK(in.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002353 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Roland Levillain5368c212014-11-27 15:03:41 +00002354 // Implement double negation with an exclusive or with value
2355 // 0x8000000000000000 (mask for bit 63, representing the sign of
2356 // a double-precision floating-point number).
2357 __ LoadLongConstant(mask, INT64_C(0x8000000000000000));
Roland Levillain271ab9c2014-11-27 15:23:57 +00002358 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
Roland Levillain88cb1752014-10-20 16:36:47 +01002359 break;
Roland Levillain5368c212014-11-27 15:03:41 +00002360 }
Roland Levillain88cb1752014-10-20 16:36:47 +01002361
2362 default:
2363 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
2364 }
2365}
2366
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002367void LocationsBuilderX86::VisitX86FPNeg(HX86FPNeg* neg) {
2368 LocationSummary* locations =
2369 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
2370 DCHECK(Primitive::IsFloatingPointType(neg->GetType()));
2371 locations->SetInAt(0, Location::RequiresFpuRegister());
2372 locations->SetInAt(1, Location::RequiresRegister());
2373 locations->SetOut(Location::SameAsFirstInput());
2374 locations->AddTemp(Location::RequiresFpuRegister());
2375}
2376
2377void InstructionCodeGeneratorX86::VisitX86FPNeg(HX86FPNeg* neg) {
2378 LocationSummary* locations = neg->GetLocations();
2379 Location out = locations->Out();
2380 DCHECK(locations->InAt(0).Equals(out));
2381
2382 Register constant_area = locations->InAt(1).AsRegister<Register>();
2383 XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
2384 if (neg->GetType() == Primitive::kPrimFloat) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00002385 __ movss(mask, codegen_->LiteralInt32Address(INT32_C(0x80000000),
2386 neg->GetBaseMethodAddress(),
2387 constant_area));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002388 __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
2389 } else {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00002390 __ movsd(mask, codegen_->LiteralInt64Address(INT64_C(0x8000000000000000),
2391 neg->GetBaseMethodAddress(),
2392 constant_area));
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00002393 __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
2394 }
2395}
2396
Roland Levillaindff1f282014-11-05 14:15:05 +00002397void LocationsBuilderX86::VisitTypeConversion(HTypeConversion* conversion) {
Roland Levillaindff1f282014-11-05 14:15:05 +00002398 Primitive::Type result_type = conversion->GetResultType();
2399 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002400 DCHECK_NE(result_type, input_type);
Roland Levillain624279f2014-12-04 11:54:28 +00002401
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002402 // The float-to-long and double-to-long type conversions rely on a
2403 // call to the runtime.
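  // Editor's note (assumption): 32-bit x86 has no single SSE instruction that
  // converts a float/double to a 64-bit integer with Java's saturating
  // semantics, so these two conversions are routed through the kQuickF2l and
  // kQuickD2l entrypoints instead of being expanded inline.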
Roland Levillain624279f2014-12-04 11:54:28 +00002404 LocationSummary::CallKind call_kind =
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002405 ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
2406 && result_type == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01002407 ? LocationSummary::kCallOnMainOnly
Roland Levillain624279f2014-12-04 11:54:28 +00002408 : LocationSummary::kNoCall;
2409 LocationSummary* locations =
2410 new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
2411
David Brazdilb2bd1c52015-03-25 11:17:37 +00002412 // The Java language does not allow treating boolean as an integral type but
2413 // our bit representation makes it safe.
David Brazdil46e2a392015-03-16 17:31:52 +00002414
Roland Levillaindff1f282014-11-05 14:15:05 +00002415 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002416 case Primitive::kPrimByte:
2417 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002418 case Primitive::kPrimLong: {
2419 // Type conversion from long to byte is a result of code transformations.
2420 HInstruction* input = conversion->InputAt(0);
2421 Location input_location = input->IsConstant()
2422 ? Location::ConstantLocation(input->AsConstant())
2423 : Location::RegisterPairLocation(EAX, EDX);
2424 locations->SetInAt(0, input_location);
2425 // Make the output overlap to please the register allocator. This greatly simplifies
2426        // the validation of the linear scan implementation.
2427 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2428 break;
2429 }
David Brazdil46e2a392015-03-16 17:31:52 +00002430 case Primitive::kPrimBoolean:
2431 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002432 case Primitive::kPrimShort:
2433 case Primitive::kPrimInt:
2434 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002435 // Processing a Dex `int-to-byte' instruction.
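        // Editor's note (assumption): on x86-32 only EAX/EBX/ECX/EDX have an
        // addressable low byte, so the sign-extending byte move emitted for
        // this conversion needs its input in a byte register (ECX is suggested
        // here) rather than in an arbitrary core register.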
Mark Mendell5f874182015-03-04 15:42:45 -05002436 locations->SetInAt(0, Location::ByteRegisterOrConstant(ECX, conversion->InputAt(0)));
2437 // Make the output overlap to please the register allocator. This greatly simplifies
2438        // the validation of the linear scan implementation.
2439 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Roland Levillain51d3fc42014-11-13 14:11:42 +00002440 break;
2441
2442 default:
2443 LOG(FATAL) << "Unexpected type conversion from " << input_type
2444 << " to " << result_type;
2445 }
2446 break;
2447
Roland Levillain01a8d712014-11-14 16:27:39 +00002448 case Primitive::kPrimShort:
2449 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002450 case Primitive::kPrimLong:
2451 // Type conversion from long to short is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002452 case Primitive::kPrimBoolean:
2453 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002454 case Primitive::kPrimByte:
2455 case Primitive::kPrimInt:
2456 case Primitive::kPrimChar:
2457 // Processing a Dex `int-to-short' instruction.
2458 locations->SetInAt(0, Location::Any());
2459 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2460 break;
2461
2462 default:
2463 LOG(FATAL) << "Unexpected type conversion from " << input_type
2464 << " to " << result_type;
2465 }
2466 break;
2467
Roland Levillain946e1432014-11-11 17:35:19 +00002468 case Primitive::kPrimInt:
2469 switch (input_type) {
2470 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002471 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002472 locations->SetInAt(0, Location::Any());
2473 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2474 break;
2475
2476 case Primitive::kPrimFloat:
Roland Levillain3f8f9362014-12-02 17:45:01 +00002477 // Processing a Dex `float-to-int' instruction.
2478 locations->SetInAt(0, Location::RequiresFpuRegister());
2479 locations->SetOut(Location::RequiresRegister());
2480 locations->AddTemp(Location::RequiresFpuRegister());
2481 break;
2482
Roland Levillain946e1432014-11-11 17:35:19 +00002483 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002484 // Processing a Dex `double-to-int' instruction.
2485 locations->SetInAt(0, Location::RequiresFpuRegister());
2486 locations->SetOut(Location::RequiresRegister());
2487 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain946e1432014-11-11 17:35:19 +00002488 break;
2489
2490 default:
2491 LOG(FATAL) << "Unexpected type conversion from " << input_type
2492 << " to " << result_type;
2493 }
2494 break;
2495
Roland Levillaindff1f282014-11-05 14:15:05 +00002496 case Primitive::kPrimLong:
2497 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002498 case Primitive::kPrimBoolean:
2499 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002500 case Primitive::kPrimByte:
2501 case Primitive::kPrimShort:
2502 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002503 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002504 // Processing a Dex `int-to-long' instruction.
Roland Levillaindff1f282014-11-05 14:15:05 +00002505 locations->SetInAt(0, Location::RegisterLocation(EAX));
2506 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
2507 break;
2508
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002509 case Primitive::kPrimFloat:
Vladimir Marko949c91f2015-01-27 10:48:44 +00002510 case Primitive::kPrimDouble: {
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002511 // Processing a Dex `float-to-long' or 'double-to-long' instruction.
Vladimir Marko949c91f2015-01-27 10:48:44 +00002512 InvokeRuntimeCallingConvention calling_convention;
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002513 XmmRegister parameter = calling_convention.GetFpuRegisterAt(0);
2514 locations->SetInAt(0, Location::FpuRegisterLocation(parameter));
2515
Vladimir Marko949c91f2015-01-27 10:48:44 +00002516 // The runtime helper puts the result in EAX, EDX.
2517 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
Vladimir Marko949c91f2015-01-27 10:48:44 +00002518 }
Mark P Mendell966c3ae2015-01-27 15:45:27 +00002519 break;
Roland Levillaindff1f282014-11-05 14:15:05 +00002520
2521 default:
2522 LOG(FATAL) << "Unexpected type conversion from " << input_type
2523 << " to " << result_type;
2524 }
2525 break;
2526
Roland Levillain981e4542014-11-14 11:47:14 +00002527 case Primitive::kPrimChar:
2528 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002529 case Primitive::kPrimLong:
2530 // Type conversion from long to char is a result of code transformations.
David Brazdil46e2a392015-03-16 17:31:52 +00002531 case Primitive::kPrimBoolean:
2532 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002533 case Primitive::kPrimByte:
2534 case Primitive::kPrimShort:
2535 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002536 // Processing a Dex `int-to-char' instruction.
2537 locations->SetInAt(0, Location::Any());
2538 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2539 break;
2540
2541 default:
2542 LOG(FATAL) << "Unexpected type conversion from " << input_type
2543 << " to " << result_type;
2544 }
2545 break;
2546
Roland Levillaindff1f282014-11-05 14:15:05 +00002547 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002548 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002549 case Primitive::kPrimBoolean:
2550 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002551 case Primitive::kPrimByte:
2552 case Primitive::kPrimShort:
2553 case Primitive::kPrimInt:
2554 case Primitive::kPrimChar:
2555 // Processing a Dex `int-to-float' instruction.
2556 locations->SetInAt(0, Location::RequiresRegister());
2557 locations->SetOut(Location::RequiresFpuRegister());
2558 break;
2559
2560 case Primitive::kPrimLong:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002561 // Processing a Dex `long-to-float' instruction.
Roland Levillain232ade02015-04-20 15:14:36 +01002562 locations->SetInAt(0, Location::Any());
2563 locations->SetOut(Location::Any());
Roland Levillain6d0e4832014-11-27 18:31:21 +00002564 break;
2565
Roland Levillaincff13742014-11-17 14:32:17 +00002566 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002567 // Processing a Dex `double-to-float' instruction.
2568 locations->SetInAt(0, Location::RequiresFpuRegister());
2569 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002570 break;
2571
2572 default:
2573 LOG(FATAL) << "Unexpected type conversion from " << input_type
2574 << " to " << result_type;
2575      }
2576 break;
2577
Roland Levillaindff1f282014-11-05 14:15:05 +00002578 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002579 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002580 case Primitive::kPrimBoolean:
2581 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002582 case Primitive::kPrimByte:
2583 case Primitive::kPrimShort:
2584 case Primitive::kPrimInt:
2585 case Primitive::kPrimChar:
2586 // Processing a Dex `int-to-double' instruction.
2587 locations->SetInAt(0, Location::RequiresRegister());
2588 locations->SetOut(Location::RequiresFpuRegister());
2589 break;
2590
2591 case Primitive::kPrimLong:
Roland Levillain647b9ed2014-11-27 12:06:00 +00002592 // Processing a Dex `long-to-double' instruction.
Roland Levillain232ade02015-04-20 15:14:36 +01002593 locations->SetInAt(0, Location::Any());
2594 locations->SetOut(Location::Any());
Roland Levillain647b9ed2014-11-27 12:06:00 +00002595 break;
2596
Roland Levillaincff13742014-11-17 14:32:17 +00002597 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002598 // Processing a Dex `float-to-double' instruction.
2599 locations->SetInAt(0, Location::RequiresFpuRegister());
2600 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Roland Levillaincff13742014-11-17 14:32:17 +00002601 break;
2602
2603 default:
2604 LOG(FATAL) << "Unexpected type conversion from " << input_type
2605 << " to " << result_type;
2606 }
Roland Levillaindff1f282014-11-05 14:15:05 +00002607 break;
2608
2609 default:
2610 LOG(FATAL) << "Unexpected type conversion from " << input_type
2611 << " to " << result_type;
2612 }
2613}
2614
2615void InstructionCodeGeneratorX86::VisitTypeConversion(HTypeConversion* conversion) {
2616 LocationSummary* locations = conversion->GetLocations();
2617 Location out = locations->Out();
2618 Location in = locations->InAt(0);
2619 Primitive::Type result_type = conversion->GetResultType();
2620 Primitive::Type input_type = conversion->GetInputType();
Nicolas Geoffray01fcc9e2014-12-01 14:16:20 +00002621 DCHECK_NE(result_type, input_type);
Roland Levillaindff1f282014-11-05 14:15:05 +00002622 switch (result_type) {
Roland Levillain51d3fc42014-11-13 14:11:42 +00002623 case Primitive::kPrimByte:
2624 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002625 case Primitive::kPrimLong:
2626 // Type conversion from long to byte is a result of code transformations.
2627 if (in.IsRegisterPair()) {
2628 __ movsxb(out.AsRegister<Register>(), in.AsRegisterPairLow<ByteRegister>());
2629 } else {
2630 DCHECK(in.GetConstant()->IsLongConstant());
2631 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2632 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2633 }
2634 break;
David Brazdil46e2a392015-03-16 17:31:52 +00002635 case Primitive::kPrimBoolean:
2636 // Boolean input is a result of code transformations.
Roland Levillain51d3fc42014-11-13 14:11:42 +00002637 case Primitive::kPrimShort:
2638 case Primitive::kPrimInt:
2639 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002640 // Processing a Dex `int-to-byte' instruction.
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002641 if (in.IsRegister()) {
2642 __ movsxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00002643 } else {
2644 DCHECK(in.GetConstant()->IsIntConstant());
2645 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
2646 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
2647 }
Roland Levillain51d3fc42014-11-13 14:11:42 +00002648 break;
2649
2650 default:
2651 LOG(FATAL) << "Unexpected type conversion from " << input_type
2652 << " to " << result_type;
2653 }
2654 break;
2655
Roland Levillain01a8d712014-11-14 16:27:39 +00002656 case Primitive::kPrimShort:
2657 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002658 case Primitive::kPrimLong:
2659 // Type conversion from long to short is a result of code transformations.
2660 if (in.IsRegisterPair()) {
2661 __ movsxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2662 } else if (in.IsDoubleStackSlot()) {
2663 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2664 } else {
2665 DCHECK(in.GetConstant()->IsLongConstant());
2666 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2667 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
2668 }
2669 break;
David Brazdil46e2a392015-03-16 17:31:52 +00002670 case Primitive::kPrimBoolean:
2671 // Boolean input is a result of code transformations.
Roland Levillain01a8d712014-11-14 16:27:39 +00002672 case Primitive::kPrimByte:
2673 case Primitive::kPrimInt:
2674 case Primitive::kPrimChar:
2675 // Processing a Dex `int-to-short' instruction.
2676 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002677 __ movsxw(out.AsRegister<Register>(), in.AsRegister<Register>());
Roland Levillain01a8d712014-11-14 16:27:39 +00002678 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002679 __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain01a8d712014-11-14 16:27:39 +00002680 } else {
2681 DCHECK(in.GetConstant()->IsIntConstant());
2682 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002683 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
Roland Levillain01a8d712014-11-14 16:27:39 +00002684 }
2685 break;
2686
2687 default:
2688 LOG(FATAL) << "Unexpected type conversion from " << input_type
2689 << " to " << result_type;
2690 }
2691 break;
2692
Roland Levillain946e1432014-11-11 17:35:19 +00002693 case Primitive::kPrimInt:
2694 switch (input_type) {
2695 case Primitive::kPrimLong:
Roland Levillain981e4542014-11-14 11:47:14 +00002696 // Processing a Dex `long-to-int' instruction.
Roland Levillain946e1432014-11-11 17:35:19 +00002697 if (in.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002698 __ movl(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
Roland Levillain946e1432014-11-11 17:35:19 +00002699 } else if (in.IsDoubleStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002700 __ movl(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain946e1432014-11-11 17:35:19 +00002701 } else {
2702 DCHECK(in.IsConstant());
2703 DCHECK(in.GetConstant()->IsLongConstant());
2704 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002705 __ movl(out.AsRegister<Register>(), Immediate(static_cast<int32_t>(value)));
Roland Levillain946e1432014-11-11 17:35:19 +00002706 }
2707 break;
2708
Roland Levillain3f8f9362014-12-02 17:45:01 +00002709 case Primitive::kPrimFloat: {
2710 // Processing a Dex `float-to-int' instruction.
2711 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2712 Register output = out.AsRegister<Register>();
2713 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002714 NearLabel done, nan;
Roland Levillain3f8f9362014-12-02 17:45:01 +00002715
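      // Editor's summary (not from the original sources): the sequence below
      // follows Java's float-to-int rules. NaN produces 0, values too large
      // for an int saturate to kPrimIntMax via the comparison, and values that
      // are too small fall through to cvttss2si, whose 0x80000000 result
      // already equals Integer.MIN_VALUE.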
2716 __ movl(output, Immediate(kPrimIntMax));
2717 // temp = int-to-float(output)
2718 __ cvtsi2ss(temp, output);
2719 // if input >= temp goto done
2720 __ comiss(input, temp);
2721 __ j(kAboveEqual, &done);
2722 // if input == NaN goto nan
2723 __ j(kUnordered, &nan);
2724 // output = float-to-int-truncate(input)
2725 __ cvttss2si(output, input);
2726 __ jmp(&done);
2727 __ Bind(&nan);
2728 // output = 0
2729 __ xorl(output, output);
2730 __ Bind(&done);
2731 break;
2732 }
2733
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002734 case Primitive::kPrimDouble: {
2735 // Processing a Dex `double-to-int' instruction.
2736 XmmRegister input = in.AsFpuRegister<XmmRegister>();
2737 Register output = out.AsRegister<Register>();
2738 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002739 NearLabel done, nan;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002740
2741 __ movl(output, Immediate(kPrimIntMax));
2742 // temp = int-to-double(output)
2743 __ cvtsi2sd(temp, output);
2744 // if input >= temp goto done
2745 __ comisd(input, temp);
2746 __ j(kAboveEqual, &done);
2747 // if input == NaN goto nan
2748 __ j(kUnordered, &nan);
2749 // output = double-to-int-truncate(input)
2750 __ cvttsd2si(output, input);
2751 __ jmp(&done);
2752 __ Bind(&nan);
2753 // output = 0
2754 __ xorl(output, output);
2755 __ Bind(&done);
Roland Levillain946e1432014-11-11 17:35:19 +00002756 break;
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002757 }
Roland Levillain946e1432014-11-11 17:35:19 +00002758
2759 default:
2760 LOG(FATAL) << "Unexpected type conversion from " << input_type
2761 << " to " << result_type;
2762 }
2763 break;
2764
Roland Levillaindff1f282014-11-05 14:15:05 +00002765 case Primitive::kPrimLong:
2766 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002767 case Primitive::kPrimBoolean:
2768 // Boolean input is a result of code transformations.
Roland Levillaindff1f282014-11-05 14:15:05 +00002769 case Primitive::kPrimByte:
2770 case Primitive::kPrimShort:
2771 case Primitive::kPrimInt:
Roland Levillain666c7322014-11-10 13:39:43 +00002772 case Primitive::kPrimChar:
Roland Levillain981e4542014-11-14 11:47:14 +00002773 // Processing a Dex `int-to-long' instruction.
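        // Editor's note: cdq sign-extends EAX into EDX, so the pair EDX:EAX
        // holds the 64-bit result; this is why the locations fix the input to
        // EAX and the output to the EAX/EDX register pair.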
Roland Levillaindff1f282014-11-05 14:15:05 +00002774 DCHECK_EQ(out.AsRegisterPairLow<Register>(), EAX);
2775 DCHECK_EQ(out.AsRegisterPairHigh<Register>(), EDX);
Roland Levillain271ab9c2014-11-27 15:23:57 +00002776 DCHECK_EQ(in.AsRegister<Register>(), EAX);
Roland Levillaindff1f282014-11-05 14:15:05 +00002777 __ cdq();
2778 break;
2779
2780 case Primitive::kPrimFloat:
Roland Levillain624279f2014-12-04 11:54:28 +00002781 // Processing a Dex `float-to-long' instruction.
Serban Constantinescuba45db02016-07-12 22:53:02 +01002782 codegen_->InvokeRuntime(kQuickF2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00002783 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
Roland Levillain624279f2014-12-04 11:54:28 +00002784 break;
2785
Roland Levillaindff1f282014-11-05 14:15:05 +00002786 case Primitive::kPrimDouble:
Roland Levillain4c0b61f2014-12-05 12:06:01 +00002787 // Processing a Dex `double-to-long' instruction.
Serban Constantinescuba45db02016-07-12 22:53:02 +01002788 codegen_->InvokeRuntime(kQuickD2l, conversion, conversion->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00002789 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
Roland Levillaindff1f282014-11-05 14:15:05 +00002790 break;
2791
2792 default:
2793 LOG(FATAL) << "Unexpected type conversion from " << input_type
2794 << " to " << result_type;
2795 }
2796 break;
2797
Roland Levillain981e4542014-11-14 11:47:14 +00002798 case Primitive::kPrimChar:
2799 switch (input_type) {
Vladimir Markob52bbde2016-02-12 12:06:05 +00002800 case Primitive::kPrimLong:
2801        // Type conversion from long to char is a result of code transformations.
2802 if (in.IsRegisterPair()) {
2803 __ movzxw(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
2804 } else if (in.IsDoubleStackSlot()) {
2805 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
2806 } else {
2807 DCHECK(in.GetConstant()->IsLongConstant());
2808 int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
2809 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
2810 }
2811 break;
David Brazdil46e2a392015-03-16 17:31:52 +00002812 case Primitive::kPrimBoolean:
2813 // Boolean input is a result of code transformations.
Roland Levillain981e4542014-11-14 11:47:14 +00002814 case Primitive::kPrimByte:
2815 case Primitive::kPrimShort:
2816 case Primitive::kPrimInt:
Roland Levillain981e4542014-11-14 11:47:14 +00002817        // Processing a Dex `int-to-char' instruction.
2818 if (in.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002819 __ movzxw(out.AsRegister<Register>(), in.AsRegister<Register>());
Roland Levillain981e4542014-11-14 11:47:14 +00002820 } else if (in.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00002821 __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
Roland Levillain981e4542014-11-14 11:47:14 +00002822 } else {
2823 DCHECK(in.GetConstant()->IsIntConstant());
2824 int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
Roland Levillain271ab9c2014-11-27 15:23:57 +00002825 __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
Roland Levillain981e4542014-11-14 11:47:14 +00002826 }
2827 break;
2828
2829 default:
2830 LOG(FATAL) << "Unexpected type conversion from " << input_type
2831 << " to " << result_type;
2832 }
2833 break;
2834
Roland Levillaindff1f282014-11-05 14:15:05 +00002835 case Primitive::kPrimFloat:
Roland Levillaincff13742014-11-17 14:32:17 +00002836 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002837 case Primitive::kPrimBoolean:
2838 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002839 case Primitive::kPrimByte:
2840 case Primitive::kPrimShort:
2841 case Primitive::kPrimInt:
2842 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002843 // Processing a Dex `int-to-float' instruction.
Roland Levillain271ab9c2014-11-27 15:23:57 +00002844 __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00002845 break;
2846
Roland Levillain6d0e4832014-11-27 18:31:21 +00002847 case Primitive::kPrimLong: {
2848 // Processing a Dex `long-to-float' instruction.
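        // Editor's note (assumption): SSE's cvtsi2ss only accepts a 32-bit
        // integer source on x86-32, so the 64-bit value goes through the x87
        // FP stack instead: PushOntoFPStack loads it as a 64-bit integer (an
        // FILD-style load) and fstps stores the single-precision result,
        // using a scratch stack slot when necessary.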
Roland Levillain232ade02015-04-20 15:14:36 +01002849 size_t adjustment = 0;
Roland Levillain6d0e4832014-11-27 18:31:21 +00002850
Roland Levillain232ade02015-04-20 15:14:36 +01002851 // Create stack space for the call to
2852 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstps below.
2853 // TODO: enhance register allocator to ask for stack temporaries.
2854 if (!in.IsDoubleStackSlot() || !out.IsStackSlot()) {
2855 adjustment = Primitive::ComponentSize(Primitive::kPrimLong);
2856 __ subl(ESP, Immediate(adjustment));
2857 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002858
Roland Levillain232ade02015-04-20 15:14:36 +01002859 // Load the value to the FP stack, using temporaries if needed.
2860 PushOntoFPStack(in, 0, adjustment, false, true);
2861
2862 if (out.IsStackSlot()) {
2863 __ fstps(Address(ESP, out.GetStackIndex() + adjustment));
2864 } else {
2865 __ fstps(Address(ESP, 0));
2866 Location stack_temp = Location::StackSlot(0);
2867 codegen_->Move32(out, stack_temp);
2868 }
2869
2870 // Remove the temporary stack space we allocated.
2871 if (adjustment != 0) {
2872 __ addl(ESP, Immediate(adjustment));
2873 }
Roland Levillain6d0e4832014-11-27 18:31:21 +00002874 break;
2875 }
2876
Roland Levillaincff13742014-11-17 14:32:17 +00002877 case Primitive::kPrimDouble:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002878 // Processing a Dex `double-to-float' instruction.
2879 __ cvtsd2ss(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00002880 break;
2881
2882 default:
2883 LOG(FATAL) << "Unexpected type conversion from " << input_type
2884 << " to " << result_type;
2885      }
2886 break;
2887
Roland Levillaindff1f282014-11-05 14:15:05 +00002888 case Primitive::kPrimDouble:
Roland Levillaincff13742014-11-17 14:32:17 +00002889 switch (input_type) {
David Brazdil46e2a392015-03-16 17:31:52 +00002890 case Primitive::kPrimBoolean:
2891 // Boolean input is a result of code transformations.
Roland Levillaincff13742014-11-17 14:32:17 +00002892 case Primitive::kPrimByte:
2893 case Primitive::kPrimShort:
2894 case Primitive::kPrimInt:
2895 case Primitive::kPrimChar:
Roland Levillain6d0e4832014-11-27 18:31:21 +00002896 // Processing a Dex `int-to-double' instruction.
Roland Levillain271ab9c2014-11-27 15:23:57 +00002897 __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
Roland Levillaincff13742014-11-17 14:32:17 +00002898 break;
2899
Roland Levillain647b9ed2014-11-27 12:06:00 +00002900 case Primitive::kPrimLong: {
2901 // Processing a Dex `long-to-double' instruction.
Roland Levillain232ade02015-04-20 15:14:36 +01002902 size_t adjustment = 0;
Roland Levillain647b9ed2014-11-27 12:06:00 +00002903
Roland Levillain232ade02015-04-20 15:14:36 +01002904 // Create stack space for the call to
2905 // InstructionCodeGeneratorX86::PushOntoFPStack and/or X86Assembler::fstpl below.
2906 // TODO: enhance register allocator to ask for stack temporaries.
2907 if (!in.IsDoubleStackSlot() || !out.IsDoubleStackSlot()) {
2908 adjustment = Primitive::ComponentSize(Primitive::kPrimLong);
2909 __ subl(ESP, Immediate(adjustment));
2910 }
2911
2912 // Load the value to the FP stack, using temporaries if needed.
2913 PushOntoFPStack(in, 0, adjustment, false, true);
2914
2915 if (out.IsDoubleStackSlot()) {
2916 __ fstpl(Address(ESP, out.GetStackIndex() + adjustment));
2917 } else {
2918 __ fstpl(Address(ESP, 0));
2919 Location stack_temp = Location::DoubleStackSlot(0);
2920 codegen_->Move64(out, stack_temp);
2921 }
2922
2923 // Remove the temporary stack space we allocated.
2924 if (adjustment != 0) {
2925 __ addl(ESP, Immediate(adjustment));
2926 }
Roland Levillain647b9ed2014-11-27 12:06:00 +00002927 break;
2928 }
2929
Roland Levillaincff13742014-11-17 14:32:17 +00002930 case Primitive::kPrimFloat:
Roland Levillain8964e2b2014-12-04 12:10:50 +00002931 // Processing a Dex `float-to-double' instruction.
2932 __ cvtss2sd(out.AsFpuRegister<XmmRegister>(), in.AsFpuRegister<XmmRegister>());
Roland Levillaincff13742014-11-17 14:32:17 +00002933 break;
2934
2935 default:
2936 LOG(FATAL) << "Unexpected type conversion from " << input_type
2937 << " to " << result_type;
2938      }
Roland Levillaindff1f282014-11-05 14:15:05 +00002939 break;
2940
2941 default:
2942 LOG(FATAL) << "Unexpected type conversion from " << input_type
2943 << " to " << result_type;
2944 }
2945}
2946
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002947void LocationsBuilderX86::VisitAdd(HAdd* add) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01002948 LocationSummary* locations =
2949 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002950 switch (add->GetResultType()) {
Mark Mendell09b84632015-02-13 17:48:38 -05002951 case Primitive::kPrimInt: {
2952 locations->SetInAt(0, Location::RequiresRegister());
2953 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
2954 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2955 break;
2956 }
2957
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002958 case Primitive::kPrimLong: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01002959 locations->SetInAt(0, Location::RequiresRegister());
2960 locations->SetInAt(1, Location::Any());
2961 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002962 break;
2963 }
2964
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002965 case Primitive::kPrimFloat:
2966 case Primitive::kPrimDouble: {
2967 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00002968 if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
2969 DCHECK(add->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00002970 } else if (add->InputAt(1)->IsConstant()) {
2971 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00002972 } else {
2973 locations->SetInAt(1, Location::Any());
2974 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002975 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002976 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002977 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002978
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002979 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002980 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
2981 break;
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002982 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002983}
2984
2985void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
2986 LocationSummary* locations = add->GetLocations();
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002987 Location first = locations->InAt(0);
2988 Location second = locations->InAt(1);
Mark Mendell09b84632015-02-13 17:48:38 -05002989 Location out = locations->Out();
2990
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00002991 switch (add->GetResultType()) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01002992 case Primitive::kPrimInt: {
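      // Editor's note: when the output register differs from both inputs, the
      // sum is formed with LEA (out = first + second, or first + constant),
      // which saves the register-to-register move a two-operand ADD would
      // otherwise need.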
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01002993 if (second.IsRegister()) {
Mark Mendell09b84632015-02-13 17:48:38 -05002994 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
2995 __ addl(out.AsRegister<Register>(), second.AsRegister<Register>());
Mark Mendell33bf2452015-05-27 10:08:24 -04002996 } else if (out.AsRegister<Register>() == second.AsRegister<Register>()) {
2997 __ addl(out.AsRegister<Register>(), first.AsRegister<Register>());
Mark Mendell09b84632015-02-13 17:48:38 -05002998 } else {
2999 __ leal(out.AsRegister<Register>(), Address(
3000 first.AsRegister<Register>(), second.AsRegister<Register>(), TIMES_1, 0));
3001 }
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003002 } else if (second.IsConstant()) {
Mark Mendell09b84632015-02-13 17:48:38 -05003003 int32_t value = second.GetConstant()->AsIntConstant()->GetValue();
3004 if (out.AsRegister<Register>() == first.AsRegister<Register>()) {
3005 __ addl(out.AsRegister<Register>(), Immediate(value));
3006 } else {
3007 __ leal(out.AsRegister<Register>(), Address(first.AsRegister<Register>(), value));
3008 }
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003009 } else {
Mark Mendell09b84632015-02-13 17:48:38 -05003010 DCHECK(first.Equals(locations->Out()));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003011 __ addl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003012 }
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003013 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003014 }
3015
3016 case Primitive::kPrimLong: {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00003017 if (second.IsRegisterPair()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003018 __ addl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3019 __ adcl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003020 } else if (second.IsDoubleStackSlot()) {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003021 __ addl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
3022 __ adcl(first.AsRegisterPairHigh<Register>(),
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003023 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003024 } else {
3025 DCHECK(second.IsConstant()) << second;
3026 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3027 __ addl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
3028 __ adcl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003029 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003030 break;
3031 }
3032
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003033 case Primitive::kPrimFloat: {
3034 if (second.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003035 __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
Mark Mendell0616ae02015-04-17 12:49:27 -04003036 } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
3037 HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003038 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003039 __ addss(first.AsFpuRegister<XmmRegister>(),
3040 codegen_->LiteralFloatAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003041 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3042 const_area->GetBaseMethodAddress(),
3043 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003044 } else {
3045 DCHECK(second.IsStackSlot());
3046 __ addss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003047 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003048 break;
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003049 }
3050
3051 case Primitive::kPrimDouble: {
3052 if (second.IsFpuRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003053 __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
Mark Mendell0616ae02015-04-17 12:49:27 -04003054 } else if (add->InputAt(1)->IsX86LoadFromConstantTable()) {
3055 HX86LoadFromConstantTable* const_area = add->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003056 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003057 __ addsd(first.AsFpuRegister<XmmRegister>(),
3058 codegen_->LiteralDoubleAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003059 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3060 const_area->GetBaseMethodAddress(),
3061 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003062 } else {
3063 DCHECK(second.IsDoubleStackSlot());
3064 __ addsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003065 }
3066 break;
3067 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003068
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003069 default:
Nicolas Geoffray7fb49da2014-10-06 09:12:41 +01003070 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
Nicolas Geoffrayd8ee7372014-03-28 15:43:40 +00003071 }
3072}
3073
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003074void LocationsBuilderX86::VisitSub(HSub* sub) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01003075 LocationSummary* locations =
3076 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003077 switch (sub->GetResultType()) {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003078 case Primitive::kPrimInt:
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003079 case Primitive::kPrimLong: {
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003080 locations->SetInAt(0, Location::RequiresRegister());
3081 locations->SetInAt(1, Location::Any());
3082 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003083 break;
3084 }
Calin Juravle11351682014-10-23 15:38:15 +01003085 case Primitive::kPrimFloat:
3086 case Primitive::kPrimDouble: {
3087 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003088 if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3089 DCHECK(sub->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003090 } else if (sub->InputAt(1)->IsConstant()) {
3091 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003092 } else {
3093 locations->SetInAt(1, Location::Any());
3094 }
Calin Juravle11351682014-10-23 15:38:15 +01003095 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003096 break;
Calin Juravle11351682014-10-23 15:38:15 +01003097 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003098
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003099 default:
Calin Juravle11351682014-10-23 15:38:15 +01003100 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003101 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003102}
3103
3104void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
3105 LocationSummary* locations = sub->GetLocations();
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003106 Location first = locations->InAt(0);
3107 Location second = locations->InAt(1);
Calin Juravle11351682014-10-23 15:38:15 +01003108 DCHECK(first.Equals(locations->Out()));
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003109 switch (sub->GetResultType()) {
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003110 case Primitive::kPrimInt: {
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003111 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003112 __ subl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003113 } else if (second.IsConstant()) {
Roland Levillain199f3362014-11-27 17:15:16 +00003114 __ subl(first.AsRegister<Register>(),
3115 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003116 } else {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003117 __ subl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003118 }
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003119 break;
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003120 }
3121
3122 case Primitive::kPrimLong: {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00003123 if (second.IsRegisterPair()) {
Calin Juravle11351682014-10-23 15:38:15 +01003124 __ subl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3125 __ sbbl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003126 } else if (second.IsDoubleStackSlot()) {
Calin Juravle11351682014-10-23 15:38:15 +01003127 __ subl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01003128 __ sbbl(first.AsRegisterPairHigh<Register>(),
3129 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003130 } else {
3131 DCHECK(second.IsConstant()) << second;
3132 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3133 __ subl(first.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
3134 __ sbbl(first.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01003135 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003136 break;
3137 }
3138
Calin Juravle11351682014-10-23 15:38:15 +01003139 case Primitive::kPrimFloat: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003140 if (second.IsFpuRegister()) {
3141 __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3142 } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3143 HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003144 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003145 __ subss(first.AsFpuRegister<XmmRegister>(),
3146 codegen_->LiteralFloatAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003147 const_area->GetConstant()->AsFloatConstant()->GetValue(),
3148 const_area->GetBaseMethodAddress(),
3149 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003150 } else {
3151 DCHECK(second.IsStackSlot());
3152 __ subss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3153 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003154 break;
Calin Juravle11351682014-10-23 15:38:15 +01003155 }
3156
3157 case Primitive::kPrimDouble: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003158 if (second.IsFpuRegister()) {
3159 __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3160 } else if (sub->InputAt(1)->IsX86LoadFromConstantTable()) {
3161 HX86LoadFromConstantTable* const_area = sub->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003162 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003163 __ subsd(first.AsFpuRegister<XmmRegister>(),
3164 codegen_->LiteralDoubleAddress(
3165 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003166 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003167 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3168 } else {
3169 DCHECK(second.IsDoubleStackSlot());
3170 __ subsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3171 }
Calin Juravle11351682014-10-23 15:38:15 +01003172 break;
3173 }
Nicolas Geoffray01bc96d2014-04-11 17:43:50 +01003174
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003175 default:
Calin Juravle11351682014-10-23 15:38:15 +01003176 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01003177 }
3178}
3179
Calin Juravle34bacdf2014-10-07 20:23:36 +01003180void LocationsBuilderX86::VisitMul(HMul* mul) {
3181 LocationSummary* locations =
3182 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3183 switch (mul->GetResultType()) {
3184 case Primitive::kPrimInt:
3185 locations->SetInAt(0, Location::RequiresRegister());
3186 locations->SetInAt(1, Location::Any());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003187 if (mul->InputAt(1)->IsIntConstant()) {
 3188      // Can use the 3-operand form of imul, so the output need not overlap the first input.
3189 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3190 } else {
3191 locations->SetOut(Location::SameAsFirstInput());
3192 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003193 break;
3194 case Primitive::kPrimLong: {
3195 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003196 locations->SetInAt(1, Location::Any());
3197 locations->SetOut(Location::SameAsFirstInput());
 3198      // Needed for imul on 32 bits with a 64-bit output.
3199 locations->AddTemp(Location::RegisterLocation(EAX));
3200 locations->AddTemp(Location::RegisterLocation(EDX));
3201 break;
3202 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003203 case Primitive::kPrimFloat:
3204 case Primitive::kPrimDouble: {
3205 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003206 if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3207 DCHECK(mul->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003208 } else if (mul->InputAt(1)->IsConstant()) {
3209 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003210 } else {
3211 locations->SetInAt(1, Location::Any());
3212 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003213 locations->SetOut(Location::SameAsFirstInput());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003214 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003215 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003216
3217 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003218 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003219 }
3220}
3221
3222void InstructionCodeGeneratorX86::VisitMul(HMul* mul) {
3223 LocationSummary* locations = mul->GetLocations();
3224 Location first = locations->InAt(0);
3225 Location second = locations->InAt(1);
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003226 Location out = locations->Out();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003227
3228 switch (mul->GetResultType()) {
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003229 case Primitive::kPrimInt:
3230 // The constant may have ended up in a register, so test explicitly to avoid
3231 // problems where the output may not be the same as the first operand.
3232 if (mul->InputAt(1)->IsIntConstant()) {
3233 Immediate imm(mul->InputAt(1)->AsIntConstant()->GetValue());
3234 __ imull(out.AsRegister<Register>(), first.AsRegister<Register>(), imm);
3235 } else if (second.IsRegister()) {
3236 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003237 __ imull(first.AsRegister<Register>(), second.AsRegister<Register>());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003238 } else {
3239 DCHECK(second.IsStackSlot());
Mark Mendell4a2aa4a2015-07-27 16:13:10 -04003240 DCHECK(first.Equals(out));
Roland Levillain271ab9c2014-11-27 15:23:57 +00003241 __ imull(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Calin Juravle34bacdf2014-10-07 20:23:36 +01003242 }
3243 break;
Calin Juravle34bacdf2014-10-07 20:23:36 +01003244
3245 case Primitive::kPrimLong: {
Calin Juravle34bacdf2014-10-07 20:23:36 +01003246 Register in1_hi = first.AsRegisterPairHigh<Register>();
3247 Register in1_lo = first.AsRegisterPairLow<Register>();
Roland Levillain271ab9c2014-11-27 15:23:57 +00003248 Register eax = locations->GetTemp(0).AsRegister<Register>();
3249 Register edx = locations->GetTemp(1).AsRegister<Register>();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003250
3251 DCHECK_EQ(EAX, eax);
3252 DCHECK_EQ(EDX, edx);
3253
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003254 // input: in1 - 64 bits, in2 - 64 bits.
Calin Juravle34bacdf2014-10-07 20:23:36 +01003255 // output: in1
3256 // formula: in1.hi : in1.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
3257 // parts: in1.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
3258 // parts: in1.lo = (in1.lo * in2.lo)[31:0]
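      // The three operand shapes handled below (constant, register pair, memory) all emit the
      // same schoolbook sequence; only the way in2 is addressed differs.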
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003259 if (second.IsConstant()) {
3260 DCHECK(second.GetConstant()->IsLongConstant());
Calin Juravle34bacdf2014-10-07 20:23:36 +01003261
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003262 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
3263 int32_t low_value = Low32Bits(value);
3264 int32_t high_value = High32Bits(value);
3265 Immediate low(low_value);
3266 Immediate high(high_value);
3267
3268 __ movl(eax, high);
3269 // eax <- in1.lo * in2.hi
3270 __ imull(eax, in1_lo);
3271 // in1.hi <- in1.hi * in2.lo
3272 __ imull(in1_hi, low);
3273 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3274 __ addl(in1_hi, eax);
3275 // move in2_lo to eax to prepare for double precision
3276 __ movl(eax, low);
3277 // edx:eax <- in1.lo * in2.lo
3278 __ mull(in1_lo);
3279 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3280 __ addl(in1_hi, edx);
3281 // in1.lo <- (in1.lo * in2.lo)[31:0];
3282 __ movl(in1_lo, eax);
3283 } else if (second.IsRegisterPair()) {
3284 Register in2_hi = second.AsRegisterPairHigh<Register>();
3285 Register in2_lo = second.AsRegisterPairLow<Register>();
3286
3287 __ movl(eax, in2_hi);
3288 // eax <- in1.lo * in2.hi
3289 __ imull(eax, in1_lo);
3290 // in1.hi <- in1.hi * in2.lo
3291 __ imull(in1_hi, in2_lo);
3292 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3293 __ addl(in1_hi, eax);
3294 // move in1_lo to eax to prepare for double precision
3295 __ movl(eax, in1_lo);
3296 // edx:eax <- in1.lo * in2.lo
3297 __ mull(in2_lo);
3298 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3299 __ addl(in1_hi, edx);
3300 // in1.lo <- (in1.lo * in2.lo)[31:0];
3301 __ movl(in1_lo, eax);
3302 } else {
3303 DCHECK(second.IsDoubleStackSlot()) << second;
3304 Address in2_hi(ESP, second.GetHighStackIndex(kX86WordSize));
3305 Address in2_lo(ESP, second.GetStackIndex());
3306
3307 __ movl(eax, in2_hi);
3308 // eax <- in1.lo * in2.hi
3309 __ imull(eax, in1_lo);
3310 // in1.hi <- in1.hi * in2.lo
3311 __ imull(in1_hi, in2_lo);
3312 // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
3313 __ addl(in1_hi, eax);
3314 // move in1_lo to eax to prepare for double precision
3315 __ movl(eax, in1_lo);
3316 // edx:eax <- in1.lo * in2.lo
3317 __ mull(in2_lo);
3318 // in1.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
3319 __ addl(in1_hi, edx);
3320 // in1.lo <- (in1.lo * in2.lo)[31:0];
3321 __ movl(in1_lo, eax);
3322 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003323
3324 break;
3325 }
3326
Calin Juravleb5bfa962014-10-21 18:02:24 +01003327 case Primitive::kPrimFloat: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003328 DCHECK(first.Equals(locations->Out()));
3329 if (second.IsFpuRegister()) {
3330 __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3331 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3332 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003333 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003334 __ mulss(first.AsFpuRegister<XmmRegister>(),
3335 codegen_->LiteralFloatAddress(
3336 const_area->GetConstant()->AsFloatConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003337 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003338 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3339 } else {
3340 DCHECK(second.IsStackSlot());
3341 __ mulss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3342 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003343 break;
Calin Juravleb5bfa962014-10-21 18:02:24 +01003344 }
3345
3346 case Primitive::kPrimDouble: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003347 DCHECK(first.Equals(locations->Out()));
3348 if (second.IsFpuRegister()) {
3349 __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3350 } else if (mul->InputAt(1)->IsX86LoadFromConstantTable()) {
3351 HX86LoadFromConstantTable* const_area = mul->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003352 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003353 __ mulsd(first.AsFpuRegister<XmmRegister>(),
3354 codegen_->LiteralDoubleAddress(
3355 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003356 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003357 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3358 } else {
3359 DCHECK(second.IsDoubleStackSlot());
3360 __ mulsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3361 }
Calin Juravleb5bfa962014-10-21 18:02:24 +01003362 break;
3363 }
Calin Juravle34bacdf2014-10-07 20:23:36 +01003364
3365 default:
Calin Juravleb5bfa962014-10-21 18:02:24 +01003366 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
Calin Juravle34bacdf2014-10-07 20:23:36 +01003367 }
3368}
3369
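// Loads |source| onto the x87 FP stack. |is_fp| selects a floating-point load (fld) versus an
// integer load (fild), and |is_wide| selects a 64-bit versus a 32-bit operand. A value that is
// not already in a stack slot is first spilled to the temporary slot at |temp_offset|;
// |stack_adjustment| compensates for extra space the caller has already pushed onto ESP.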
Roland Levillain232ade02015-04-20 15:14:36 +01003370void InstructionCodeGeneratorX86::PushOntoFPStack(Location source,
3371 uint32_t temp_offset,
3372 uint32_t stack_adjustment,
3373 bool is_fp,
3374 bool is_wide) {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003375 if (source.IsStackSlot()) {
Roland Levillain232ade02015-04-20 15:14:36 +01003376 DCHECK(!is_wide);
3377 if (is_fp) {
3378 __ flds(Address(ESP, source.GetStackIndex() + stack_adjustment));
3379 } else {
3380 __ filds(Address(ESP, source.GetStackIndex() + stack_adjustment));
3381 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003382 } else if (source.IsDoubleStackSlot()) {
Roland Levillain232ade02015-04-20 15:14:36 +01003383 DCHECK(is_wide);
3384 if (is_fp) {
3385 __ fldl(Address(ESP, source.GetStackIndex() + stack_adjustment));
3386 } else {
3387 __ fildl(Address(ESP, source.GetStackIndex() + stack_adjustment));
3388 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003389 } else {
3390 // Write the value to the temporary location on the stack and load to FP stack.
Roland Levillain232ade02015-04-20 15:14:36 +01003391 if (!is_wide) {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003392 Location stack_temp = Location::StackSlot(temp_offset);
3393 codegen_->Move32(stack_temp, source);
Roland Levillain232ade02015-04-20 15:14:36 +01003394 if (is_fp) {
3395 __ flds(Address(ESP, temp_offset));
3396 } else {
3397 __ filds(Address(ESP, temp_offset));
3398 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003399 } else {
3400 Location stack_temp = Location::DoubleStackSlot(temp_offset);
3401 codegen_->Move64(stack_temp, source);
Roland Levillain232ade02015-04-20 15:14:36 +01003402 if (is_fp) {
3403 __ fldl(Address(ESP, temp_offset));
3404 } else {
3405 __ fildl(Address(ESP, temp_offset));
3406 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003407 }
3408 }
3409}
3410
3411void InstructionCodeGeneratorX86::GenerateRemFP(HRem *rem) {
3412 Primitive::Type type = rem->GetResultType();
3413 bool is_float = type == Primitive::kPrimFloat;
3414 size_t elem_size = Primitive::ComponentSize(type);
3415 LocationSummary* locations = rem->GetLocations();
3416 Location first = locations->InAt(0);
3417 Location second = locations->InAt(1);
3418 Location out = locations->Out();
3419
3420 // Create stack space for 2 elements.
3421 // TODO: enhance register allocator to ask for stack temporaries.
3422 __ subl(ESP, Immediate(2 * elem_size));
3423
3424 // Load the values to the FP stack in reverse order, using temporaries if needed.
Roland Levillain232ade02015-04-20 15:14:36 +01003425 const bool is_wide = !is_float;
3426 PushOntoFPStack(second, elem_size, 2 * elem_size, /* is_fp */ true, is_wide);
3427 PushOntoFPStack(first, 0, 2 * elem_size, /* is_fp */ true, is_wide);
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003428
3429 // Loop doing FPREM until we stabilize.
Mark Mendell0c9497d2015-08-21 09:30:05 -04003430 NearLabel retry;
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003431 __ Bind(&retry);
3432 __ fprem();
3433
3434 // Move FP status to AX.
3435 __ fstsw();
3436
3437 // And see if the argument reduction is complete. This is signaled by the
3438 // C2 FPU flag bit set to 0.
3439 __ andl(EAX, Immediate(kC2ConditionMask));
3440 __ j(kNotEqual, &retry);
3441
3442 // We have settled on the final value. Retrieve it into an XMM register.
3443 // Store FP top of stack to real stack.
3444 if (is_float) {
3445 __ fsts(Address(ESP, 0));
3446 } else {
3447 __ fstl(Address(ESP, 0));
3448 }
3449
3450 // Pop the 2 items from the FP stack.
3451 __ fucompp();
3452
3453 // Load the value from the stack into an XMM register.
3454 DCHECK(out.IsFpuRegister()) << out;
3455 if (is_float) {
3456 __ movss(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3457 } else {
3458 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
3459 }
3460
3461 // And remove the temporary stack space we allocated.
3462 __ addl(ESP, Immediate(2 * elem_size));
3463}
3464
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003465
3466void InstructionCodeGeneratorX86::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3467 DCHECK(instruction->IsDiv() || instruction->IsRem());
3468
3469 LocationSummary* locations = instruction->GetLocations();
3470 DCHECK(locations->InAt(1).IsConstant());
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003471 DCHECK(locations->InAt(1).GetConstant()->IsIntConstant());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003472
3473 Register out_register = locations->Out().AsRegister<Register>();
3474 Register input_register = locations->InAt(0).AsRegister<Register>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003475 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003476
3477 DCHECK(imm == 1 || imm == -1);
3478
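  // x % 1 and x % -1 are always 0; x / 1 is x and x / -1 is -x. Note that negl leaves
  // INT_MIN unchanged, which matches the Java semantics of MIN_VALUE / -1.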
3479 if (instruction->IsRem()) {
3480 __ xorl(out_register, out_register);
3481 } else {
3482 __ movl(out_register, input_register);
3483 if (imm == -1) {
3484 __ negl(out_register);
3485 }
3486 }
3487}
3488
3489
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003490void InstructionCodeGeneratorX86::DivByPowerOfTwo(HDiv* instruction) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003491 LocationSummary* locations = instruction->GetLocations();
3492
3493 Register out_register = locations->Out().AsRegister<Register>();
3494 Register input_register = locations->InAt(0).AsRegister<Register>();
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003495 int32_t imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003496 DCHECK(IsPowerOfTwo(AbsOrMin(imm)));
3497 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003498
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003499 Register num = locations->GetTemp(0).AsRegister<Register>();
3500
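  // An arithmetic right shift rounds toward negative infinity, so a negative dividend is first
  // biased by (2^shift - 1); the cmov below keeps the unbiased value for non-negative inputs,
  // and the final negation handles a negative divisor.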
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003501 __ leal(num, Address(input_register, abs_imm - 1));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003502 __ testl(input_register, input_register);
3503 __ cmovl(kGreaterEqual, num, input_register);
3504 int shift = CTZ(imm);
3505 __ sarl(num, Immediate(shift));
3506
3507 if (imm < 0) {
3508 __ negl(num);
3509 }
3510
3511 __ movl(out_register, num);
3512}
3513
3514void InstructionCodeGeneratorX86::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3515 DCHECK(instruction->IsDiv() || instruction->IsRem());
3516
3517 LocationSummary* locations = instruction->GetLocations();
3518 int imm = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
3519
3520 Register eax = locations->InAt(0).AsRegister<Register>();
3521 Register out = locations->Out().AsRegister<Register>();
3522 Register num;
3523 Register edx;
3524
3525 if (instruction->IsDiv()) {
3526 edx = locations->GetTemp(0).AsRegister<Register>();
3527 num = locations->GetTemp(1).AsRegister<Register>();
3528 } else {
3529 edx = locations->Out().AsRegister<Register>();
3530 num = locations->GetTemp(0).AsRegister<Register>();
3531 }
3532
3533 DCHECK_EQ(EAX, eax);
3534 DCHECK_EQ(EDX, edx);
3535 if (instruction->IsDiv()) {
3536 DCHECK_EQ(EAX, out);
3537 } else {
3538 DCHECK_EQ(EDX, out);
3539 }
3540
3541 int64_t magic;
3542 int shift;
3543 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3544
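  // The quotient is the high 32 bits of magic * numerator, corrected by +/- the numerator when
  // the signs of magic and imm differ, arithmetically shifted, with the sign bit added back so
  // the result rounds toward zero. For Rem, the remainder is then numerator - quotient * imm.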
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003545 // Save the numerator.
3546 __ movl(num, eax);
3547
3548 // EAX = magic
3549 __ movl(eax, Immediate(magic));
3550
3551 // EDX:EAX = magic * numerator
3552 __ imull(num);
3553
3554 if (imm > 0 && magic < 0) {
3555 // EDX += num
3556 __ addl(edx, num);
3557 } else if (imm < 0 && magic > 0) {
3558 __ subl(edx, num);
3559 }
3560
3561 // Shift if needed.
3562 if (shift != 0) {
3563 __ sarl(edx, Immediate(shift));
3564 }
3565
3566 // EDX += 1 if EDX < 0
3567 __ movl(eax, edx);
3568 __ shrl(edx, Immediate(31));
3569 __ addl(edx, eax);
3570
3571 if (instruction->IsRem()) {
3572 __ movl(eax, num);
3573 __ imull(edx, Immediate(imm));
3574 __ subl(eax, edx);
3575 __ movl(edx, eax);
3576 } else {
3577 __ movl(eax, edx);
3578 }
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003579}
3580
Calin Juravlebacfec32014-11-14 15:54:36 +00003581void InstructionCodeGeneratorX86::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3582 DCHECK(instruction->IsDiv() || instruction->IsRem());
3583
3584 LocationSummary* locations = instruction->GetLocations();
3585 Location out = locations->Out();
3586 Location first = locations->InAt(0);
3587 Location second = locations->InAt(1);
3588 bool is_div = instruction->IsDiv();
3589
3590 switch (instruction->GetResultType()) {
3591 case Primitive::kPrimInt: {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003592 DCHECK_EQ(EAX, first.AsRegister<Register>());
3593 DCHECK_EQ(is_div ? EAX : EDX, out.AsRegister<Register>());
Calin Juravlebacfec32014-11-14 15:54:36 +00003594
Vladimir Marko13c86fd2015-11-11 12:37:46 +00003595 if (second.IsConstant()) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003596 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003597
3598 if (imm == 0) {
 3599        // Do not generate anything for a zero divisor: DivZeroCheck ensures this code is never reached.
3600 } else if (imm == 1 || imm == -1) {
3601 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003602 } else if (is_div && IsPowerOfTwo(AbsOrMin(imm))) {
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003603 DivByPowerOfTwo(instruction->AsDiv());
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003604 } else {
3605 DCHECK(imm <= -2 || imm >= 2);
3606 GenerateDivRemWithAnyConstant(instruction);
3607 }
3608 } else {
David Srbecky9cd6d372016-02-09 15:24:47 +00003609 SlowPathCode* slow_path = new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86(
3610 instruction, out.AsRegister<Register>(), is_div);
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003611 codegen_->AddSlowPath(slow_path);
Calin Juravlebacfec32014-11-14 15:54:36 +00003612
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003613 Register second_reg = second.AsRegister<Register>();
3614 // 0x80000000/-1 triggers an arithmetic exception!
 3615        // Dividing by -1 is actually negation and -0x80000000 = 0x80000000 so
3616 // it's safe to just use negl instead of more complex comparisons.
Calin Juravlebacfec32014-11-14 15:54:36 +00003617
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003618 __ cmpl(second_reg, Immediate(-1));
3619 __ j(kEqual, slow_path->GetEntryLabel());
Calin Juravlebacfec32014-11-14 15:54:36 +00003620
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003621 // edx:eax <- sign-extended of eax
3622 __ cdq();
3623 // eax = quotient, edx = remainder
3624 __ idivl(second_reg);
3625 __ Bind(slow_path->GetExitLabel());
3626 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003627 break;
3628 }
3629
3630 case Primitive::kPrimLong: {
3631 InvokeRuntimeCallingConvention calling_convention;
3632 DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
3633 DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
3634 DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
3635 DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
3636 DCHECK_EQ(EAX, out.AsRegisterPairLow<Register>());
3637 DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());
3638
3639 if (is_div) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003640 codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003641 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00003642 } else {
Serban Constantinescuba45db02016-07-12 22:53:02 +01003643 codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003644 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
Calin Juravlebacfec32014-11-14 15:54:36 +00003645 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003646 break;
3647 }
3648
3649 default:
3650 LOG(FATAL) << "Unexpected type for GenerateDivRemIntegral " << instruction->GetResultType();
3651 }
3652}
3653
Calin Juravle7c4954d2014-10-28 16:57:40 +00003654void LocationsBuilderX86::VisitDiv(HDiv* div) {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003655 LocationSummary::CallKind call_kind = (div->GetResultType() == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003656 ? LocationSummary::kCallOnMainOnly
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003657 : LocationSummary::kNoCall;
3658 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
3659
Calin Juravle7c4954d2014-10-28 16:57:40 +00003660 switch (div->GetResultType()) {
Calin Juravled0d48522014-11-04 16:40:20 +00003661 case Primitive::kPrimInt: {
3662 locations->SetInAt(0, Location::RegisterLocation(EAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003663 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Calin Juravled0d48522014-11-04 16:40:20 +00003664 locations->SetOut(Location::SameAsFirstInput());
3665 // Intel uses edx:eax as the dividend.
3666 locations->AddTemp(Location::RegisterLocation(EDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003667      // We need to save the numerator while we tweak EAX and EDX. Since imul forces its
 3668      // result into EAX and EDX, things are simpler if we also use EAX as the output and
 3669      // request another temp.
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003670 if (div->InputAt(1)->IsIntConstant()) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003671 locations->AddTemp(Location::RequiresRegister());
3672 }
Calin Juravled0d48522014-11-04 16:40:20 +00003673 break;
3674 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003675 case Primitive::kPrimLong: {
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003676 InvokeRuntimeCallingConvention calling_convention;
3677 locations->SetInAt(0, Location::RegisterPairLocation(
3678 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3679 locations->SetInAt(1, Location::RegisterPairLocation(
3680 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3681 // Runtime helper puts the result in EAX, EDX.
3682 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
Calin Juravle7c4954d2014-10-28 16:57:40 +00003683 break;
3684 }
3685 case Primitive::kPrimFloat:
3686 case Primitive::kPrimDouble: {
3687 locations->SetInAt(0, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003688 if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3689 DCHECK(div->InputAt(1)->IsEmittedAtUseSite());
Nicolas Geoffray7770a3e2016-02-03 10:13:41 +00003690 } else if (div->InputAt(1)->IsConstant()) {
3691 locations->SetInAt(1, Location::RequiresFpuRegister());
David Brazdilb3e773e2016-01-26 11:28:37 +00003692 } else {
3693 locations->SetInAt(1, Location::Any());
3694 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003695 locations->SetOut(Location::SameAsFirstInput());
3696 break;
3697 }
3698
3699 default:
3700 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3701 }
3702}
3703
3704void InstructionCodeGeneratorX86::VisitDiv(HDiv* div) {
3705 LocationSummary* locations = div->GetLocations();
3706 Location first = locations->InAt(0);
3707 Location second = locations->InAt(1);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003708
3709 switch (div->GetResultType()) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003710 case Primitive::kPrimInt:
Calin Juravle7c4954d2014-10-28 16:57:40 +00003711 case Primitive::kPrimLong: {
Calin Juravlebacfec32014-11-14 15:54:36 +00003712 GenerateDivRemIntegral(div);
Calin Juravle7c4954d2014-10-28 16:57:40 +00003713 break;
3714 }
3715
3716 case Primitive::kPrimFloat: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003717 if (second.IsFpuRegister()) {
3718 __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3719 } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3720 HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003721 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003722 __ divss(first.AsFpuRegister<XmmRegister>(),
3723 codegen_->LiteralFloatAddress(
3724 const_area->GetConstant()->AsFloatConstant()->GetValue(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003725 const_area->GetBaseMethodAddress(),
Mark Mendell0616ae02015-04-17 12:49:27 -04003726 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
3727 } else {
3728 DCHECK(second.IsStackSlot());
3729 __ divss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3730 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003731 break;
3732 }
3733
3734 case Primitive::kPrimDouble: {
Mark Mendell0616ae02015-04-17 12:49:27 -04003735 if (second.IsFpuRegister()) {
3736 __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
3737 } else if (div->InputAt(1)->IsX86LoadFromConstantTable()) {
3738 HX86LoadFromConstantTable* const_area = div->InputAt(1)->AsX86LoadFromConstantTable();
David Brazdilb3e773e2016-01-26 11:28:37 +00003739 DCHECK(const_area->IsEmittedAtUseSite());
Mark Mendell0616ae02015-04-17 12:49:27 -04003740 __ divsd(first.AsFpuRegister<XmmRegister>(),
3741 codegen_->LiteralDoubleAddress(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00003742 const_area->GetConstant()->AsDoubleConstant()->GetValue(),
3743 const_area->GetBaseMethodAddress(),
3744 const_area->GetLocations()->InAt(0).AsRegister<Register>()));
Mark Mendell0616ae02015-04-17 12:49:27 -04003745 } else {
3746 DCHECK(second.IsDoubleStackSlot());
3747 __ divsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
3748 }
Calin Juravle7c4954d2014-10-28 16:57:40 +00003749 break;
3750 }
3751
3752 default:
3753 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3754 }
3755}
3756
Calin Juravlebacfec32014-11-14 15:54:36 +00003757void LocationsBuilderX86::VisitRem(HRem* rem) {
Calin Juravled2ec87d2014-12-08 14:24:46 +00003758 Primitive::Type type = rem->GetResultType();
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003759
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003760 LocationSummary::CallKind call_kind = (rem->GetResultType() == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003761 ? LocationSummary::kCallOnMainOnly
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00003762 : LocationSummary::kNoCall;
3763 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
Calin Juravlebacfec32014-11-14 15:54:36 +00003764
Calin Juravled2ec87d2014-12-08 14:24:46 +00003765 switch (type) {
Calin Juravlebacfec32014-11-14 15:54:36 +00003766 case Primitive::kPrimInt: {
3767 locations->SetInAt(0, Location::RegisterLocation(EAX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003768 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Calin Juravlebacfec32014-11-14 15:54:36 +00003769 locations->SetOut(Location::RegisterLocation(EDX));
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003770      // We need to save the numerator while we tweak EAX and EDX. Since imul forces its
 3771      // result into EAX and EDX, things are simpler if we also use EDX as the output and
 3772      // request another temp.
Guillaume Sanchezb19930c2015-04-09 21:12:15 +01003773 if (rem->InputAt(1)->IsIntConstant()) {
Guillaume Sanchez0f88e872015-03-30 17:55:45 +01003774 locations->AddTemp(Location::RequiresRegister());
3775 }
Calin Juravlebacfec32014-11-14 15:54:36 +00003776 break;
3777 }
3778 case Primitive::kPrimLong: {
3779 InvokeRuntimeCallingConvention calling_convention;
3780 locations->SetInAt(0, Location::RegisterPairLocation(
3781 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3782 locations->SetInAt(1, Location::RegisterPairLocation(
3783 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3784 // Runtime helper puts the result in EAX, EDX.
3785 locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
3786 break;
3787 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003788 case Primitive::kPrimDouble:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003789 case Primitive::kPrimFloat: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003790 locations->SetInAt(0, Location::Any());
3791 locations->SetInAt(1, Location::Any());
3792 locations->SetOut(Location::RequiresFpuRegister());
3793 locations->AddTemp(Location::RegisterLocation(EAX));
Calin Juravlebacfec32014-11-14 15:54:36 +00003794 break;
3795 }
3796
3797 default:
Calin Juravled2ec87d2014-12-08 14:24:46 +00003798 LOG(FATAL) << "Unexpected rem type " << type;
Calin Juravlebacfec32014-11-14 15:54:36 +00003799 }
3800}
3801
3802void InstructionCodeGeneratorX86::VisitRem(HRem* rem) {
3803 Primitive::Type type = rem->GetResultType();
3804 switch (type) {
3805 case Primitive::kPrimInt:
3806 case Primitive::kPrimLong: {
3807 GenerateDivRemIntegral(rem);
3808 break;
3809 }
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003810 case Primitive::kPrimFloat:
Calin Juravlebacfec32014-11-14 15:54:36 +00003811 case Primitive::kPrimDouble: {
Mark Mendell24f2dfa2015-01-14 19:51:45 -05003812 GenerateRemFP(rem);
Calin Juravlebacfec32014-11-14 15:54:36 +00003813 break;
3814 }
3815 default:
3816 LOG(FATAL) << "Unexpected rem type " << type;
3817 }
3818}
3819
Calin Juravled0d48522014-11-04 16:40:20 +00003820void LocationsBuilderX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003821 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003822 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003823 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003824 case Primitive::kPrimByte:
3825 case Primitive::kPrimChar:
3826 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003827 case Primitive::kPrimInt: {
3828 locations->SetInAt(0, Location::Any());
3829 break;
3830 }
3831 case Primitive::kPrimLong: {
3832 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
3833 if (!instruction->IsConstant()) {
3834 locations->AddTemp(Location::RequiresRegister());
3835 }
3836 break;
3837 }
3838 default:
3839 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
3840 }
Calin Juravled0d48522014-11-04 16:40:20 +00003841}
3842
3843void InstructionCodeGeneratorX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07003844 SlowPathCode* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86(instruction);
Calin Juravled0d48522014-11-04 16:40:20 +00003845 codegen_->AddSlowPath(slow_path);
3846
3847 LocationSummary* locations = instruction->GetLocations();
3848 Location value = locations->InAt(0);
3849
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003850 switch (instruction->GetType()) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003851 case Primitive::kPrimBoolean:
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003852 case Primitive::kPrimByte:
3853 case Primitive::kPrimChar:
3854 case Primitive::kPrimShort:
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003855 case Primitive::kPrimInt: {
3856 if (value.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003857 __ testl(value.AsRegister<Register>(), value.AsRegister<Register>());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003858 __ j(kEqual, slow_path->GetEntryLabel());
3859 } else if (value.IsStackSlot()) {
3860 __ cmpl(Address(ESP, value.GetStackIndex()), Immediate(0));
3861 __ j(kEqual, slow_path->GetEntryLabel());
3862 } else {
3863 DCHECK(value.IsConstant()) << value;
3864 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01003865 __ jmp(slow_path->GetEntryLabel());
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003866 }
3867 }
3868 break;
Calin Juravled0d48522014-11-04 16:40:20 +00003869 }
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003870 case Primitive::kPrimLong: {
3871 if (value.IsRegisterPair()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003872 Register temp = locations->GetTemp(0).AsRegister<Register>();
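        // The 64-bit value is zero iff the bitwise OR of its two halves is zero.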
Calin Juravled6fb6cf2014-11-11 19:07:44 +00003873 __ movl(temp, value.AsRegisterPairLow<Register>());
3874 __ orl(temp, value.AsRegisterPairHigh<Register>());
3875 __ j(kEqual, slow_path->GetEntryLabel());
3876 } else {
3877 DCHECK(value.IsConstant()) << value;
3878 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3879 __ jmp(slow_path->GetEntryLabel());
3880 }
3881 }
3882 break;
3883 }
3884 default:
3885 LOG(FATAL) << "Unexpected type for HDivZeroCheck" << instruction->GetType();
Calin Juravled0d48522014-11-04 16:40:20 +00003886 }
Calin Juravled0d48522014-11-04 16:40:20 +00003887}
3888
Calin Juravle9aec02f2014-11-18 23:06:35 +00003889void LocationsBuilderX86::HandleShift(HBinaryOperation* op) {
3890 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3891
3892 LocationSummary* locations =
3893 new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
3894
3895 switch (op->GetResultType()) {
Mark P Mendell73945692015-04-29 14:56:17 +00003896 case Primitive::kPrimInt:
Calin Juravle9aec02f2014-11-18 23:06:35 +00003897 case Primitive::kPrimLong: {
Mark P Mendell73945692015-04-29 14:56:17 +00003898      // The first input can't be Location::Any() when the output is SameAsFirstInput().
Calin Juravle9aec02f2014-11-18 23:06:35 +00003899 locations->SetInAt(0, Location::RequiresRegister());
Mark P Mendell73945692015-04-29 14:56:17 +00003900 // The shift count needs to be in CL or a constant.
3901 locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, op->InputAt(1)));
Calin Juravle9aec02f2014-11-18 23:06:35 +00003902 locations->SetOut(Location::SameAsFirstInput());
3903 break;
3904 }
3905 default:
3906 LOG(FATAL) << "Unexpected op type " << op->GetResultType();
3907 }
3908}
3909
3910void InstructionCodeGeneratorX86::HandleShift(HBinaryOperation* op) {
3911 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
3912
3913 LocationSummary* locations = op->GetLocations();
3914 Location first = locations->InAt(0);
3915 Location second = locations->InAt(1);
3916 DCHECK(first.Equals(locations->Out()));
3917
3918 switch (op->GetResultType()) {
3919 case Primitive::kPrimInt: {
Mark P Mendell73945692015-04-29 14:56:17 +00003920 DCHECK(first.IsRegister());
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003921 Register first_reg = first.AsRegister<Register>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003922 if (second.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00003923 Register second_reg = second.AsRegister<Register>();
Calin Juravle9aec02f2014-11-18 23:06:35 +00003924 DCHECK_EQ(ECX, second_reg);
3925 if (op->IsShl()) {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003926 __ shll(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003927 } else if (op->IsShr()) {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003928 __ sarl(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003929 } else {
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003930 __ shrl(first_reg, second_reg);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003931 }
3932 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00003933 int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance;
Mark P Mendell73945692015-04-29 14:56:17 +00003934 if (shift == 0) {
3935 return;
3936 }
3937 Immediate imm(shift);
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003938 if (op->IsShl()) {
3939 __ shll(first_reg, imm);
3940 } else if (op->IsShr()) {
3941 __ sarl(first_reg, imm);
3942 } else {
3943 __ shrl(first_reg, imm);
Calin Juravle9aec02f2014-11-18 23:06:35 +00003944 }
3945 }
3946 break;
3947 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003948 case Primitive::kPrimLong: {
Mark P Mendell73945692015-04-29 14:56:17 +00003949 if (second.IsRegister()) {
3950 Register second_reg = second.AsRegister<Register>();
3951 DCHECK_EQ(ECX, second_reg);
3952 if (op->IsShl()) {
3953 GenerateShlLong(first, second_reg);
3954 } else if (op->IsShr()) {
3955 GenerateShrLong(first, second_reg);
3956 } else {
3957 GenerateUShrLong(first, second_reg);
3958 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003959 } else {
Mark P Mendell73945692015-04-29 14:56:17 +00003960 // Shift by a constant.
Roland Levillain5b5b9312016-03-22 14:57:31 +00003961 int32_t shift = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
Mark P Mendell73945692015-04-29 14:56:17 +00003962 // Nothing to do if the shift is 0, as the input is already the output.
3963 if (shift != 0) {
3964 if (op->IsShl()) {
3965 GenerateShlLong(first, shift);
3966 } else if (op->IsShr()) {
3967 GenerateShrLong(first, shift);
3968 } else {
3969 GenerateUShrLong(first, shift);
3970 }
3971 }
Roland Levillainf9aac1e2015-04-10 18:12:48 +00003972 }
3973 break;
3974 }
Calin Juravle9aec02f2014-11-18 23:06:35 +00003975 default:
3976 LOG(FATAL) << "Unexpected op type " << op->GetResultType();
3977 }
3978}
3979
Mark P Mendell73945692015-04-29 14:56:17 +00003980void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, int shift) {
3981 Register low = loc.AsRegisterPairLow<Register>();
3982 Register high = loc.AsRegisterPairHigh<Register>();
Mark Mendellba56d062015-05-05 21:34:03 -04003983 if (shift == 1) {
3984 // This is just an addition.
3985 __ addl(low, low);
3986 __ adcl(high, high);
3987 } else if (shift == 32) {
Mark P Mendell73945692015-04-29 14:56:17 +00003988 // Shift by 32 is easy. High gets low, and low gets 0.
3989 codegen_->EmitParallelMoves(
3990 loc.ToLow(),
3991 loc.ToHigh(),
3992 Primitive::kPrimInt,
3993 Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
3994 loc.ToLow(),
3995 Primitive::kPrimInt);
3996 } else if (shift > 32) {
3997 // Low part becomes 0. High part is low part << (shift-32).
3998 __ movl(high, low);
3999 __ shll(high, Immediate(shift - 32));
4000 __ xorl(low, low);
4001 } else {
4002 // Between 1 and 31.
4003 __ shld(high, low, Immediate(shift));
4004 __ shll(low, Immediate(shift));
4005 }
4006}
4007
Calin Juravle9aec02f2014-11-18 23:06:35 +00004008void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004009 NearLabel done;
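  // shld/shll only use the shift count modulo 32, so counts of 32..63 need an explicit fix-up:
  // the high word takes the (already shifted) low word and the low word becomes zero.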
Calin Juravle9aec02f2014-11-18 23:06:35 +00004010 __ shld(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>(), shifter);
4011 __ shll(loc.AsRegisterPairLow<Register>(), shifter);
4012 __ testl(shifter, Immediate(32));
4013 __ j(kEqual, &done);
4014 __ movl(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>());
4015 __ movl(loc.AsRegisterPairLow<Register>(), Immediate(0));
4016 __ Bind(&done);
4017}
4018
Mark P Mendell73945692015-04-29 14:56:17 +00004019void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, int shift) {
4020 Register low = loc.AsRegisterPairLow<Register>();
4021 Register high = loc.AsRegisterPairHigh<Register>();
4022 if (shift == 32) {
4023 // Need to copy the sign.
4024 DCHECK_NE(low, high);
4025 __ movl(low, high);
4026 __ sarl(high, Immediate(31));
4027 } else if (shift > 32) {
4028 DCHECK_NE(low, high);
4029 // High part becomes sign. Low part is shifted by shift - 32.
4030 __ movl(low, high);
4031 __ sarl(high, Immediate(31));
4032 __ sarl(low, Immediate(shift - 32));
4033 } else {
4034 // Between 1 and 31.
4035 __ shrd(low, high, Immediate(shift));
4036 __ sarl(high, Immediate(shift));
4037 }
4038}
4039
Calin Juravle9aec02f2014-11-18 23:06:35 +00004040void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004041 NearLabel done;
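  // Same 5-bit count masking fix-up as in GenerateShlLong above: for counts of 32..63 the low
  // word takes the shifted high word and the high word is filled with the sign bit.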
Calin Juravle9aec02f2014-11-18 23:06:35 +00004042 __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
4043 __ sarl(loc.AsRegisterPairHigh<Register>(), shifter);
4044 __ testl(shifter, Immediate(32));
4045 __ j(kEqual, &done);
4046 __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
4047 __ sarl(loc.AsRegisterPairHigh<Register>(), Immediate(31));
4048 __ Bind(&done);
4049}
4050
Mark P Mendell73945692015-04-29 14:56:17 +00004051void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, int shift) {
4052 Register low = loc.AsRegisterPairLow<Register>();
4053 Register high = loc.AsRegisterPairHigh<Register>();
4054 if (shift == 32) {
4055 // Shift by 32 is easy. Low gets high, and high gets 0.
4056 codegen_->EmitParallelMoves(
4057 loc.ToHigh(),
4058 loc.ToLow(),
4059 Primitive::kPrimInt,
4060 Location::ConstantLocation(GetGraph()->GetIntConstant(0)),
4061 loc.ToHigh(),
4062 Primitive::kPrimInt);
4063 } else if (shift > 32) {
4064 // Low part is high >> (shift - 32). High part becomes 0.
4065 __ movl(low, high);
4066 __ shrl(low, Immediate(shift - 32));
4067 __ xorl(high, high);
4068 } else {
4069 // Between 1 and 31.
4070 __ shrd(low, high, Immediate(shift));
4071 __ shrl(high, Immediate(shift));
4072 }
4073}
4074
Calin Juravle9aec02f2014-11-18 23:06:35 +00004075void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, Register shifter) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004076 NearLabel done;
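  // Same fix-up as above: for counts of 32..63 the low word takes the shifted high word and
  // the high word becomes zero.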
Calin Juravle9aec02f2014-11-18 23:06:35 +00004077 __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
4078 __ shrl(loc.AsRegisterPairHigh<Register>(), shifter);
4079 __ testl(shifter, Immediate(32));
4080 __ j(kEqual, &done);
4081 __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
4082 __ movl(loc.AsRegisterPairHigh<Register>(), Immediate(0));
4083 __ Bind(&done);
4084}
4085
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004086void LocationsBuilderX86::VisitRor(HRor* ror) {
4087 LocationSummary* locations =
4088 new (GetGraph()->GetArena()) LocationSummary(ror, LocationSummary::kNoCall);
4089
4090 switch (ror->GetResultType()) {
4091 case Primitive::kPrimLong:
4092 // Add the temporary needed.
4093 locations->AddTemp(Location::RequiresRegister());
4094 FALLTHROUGH_INTENDED;
4095 case Primitive::kPrimInt:
4096 locations->SetInAt(0, Location::RequiresRegister());
4097 // The shift count needs to be in CL (unless it is a constant).
4098 locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, ror->InputAt(1)));
4099 locations->SetOut(Location::SameAsFirstInput());
4100 break;
4101 default:
4102 LOG(FATAL) << "Unexpected operation type " << ror->GetResultType();
4103 UNREACHABLE();
4104 }
4105}
4106
4107void InstructionCodeGeneratorX86::VisitRor(HRor* ror) {
4108 LocationSummary* locations = ror->GetLocations();
4109 Location first = locations->InAt(0);
4110 Location second = locations->InAt(1);
4111
4112 if (ror->GetResultType() == Primitive::kPrimInt) {
4113 Register first_reg = first.AsRegister<Register>();
4114 if (second.IsRegister()) {
4115 Register second_reg = second.AsRegister<Register>();
4116 __ rorl(first_reg, second_reg);
4117 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004118 Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftDistance);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004119 __ rorl(first_reg, imm);
4120 }
4121 return;
4122 }
4123
4124 DCHECK_EQ(ror->GetResultType(), Primitive::kPrimLong);
4125 Register first_reg_lo = first.AsRegisterPairLow<Register>();
4126 Register first_reg_hi = first.AsRegisterPairHigh<Register>();
4127 Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
4128 if (second.IsRegister()) {
4129 Register second_reg = second.AsRegister<Register>();
4130 DCHECK_EQ(second_reg, ECX);
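    // shrd uses the rotation count modulo 32, so the pair is first rotated by (count & 31);
    // if bit 5 of the count is set (a rotation by 32 or more) the two halves are then swapped.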
4131 __ movl(temp_reg, first_reg_hi);
4132 __ shrd(first_reg_hi, first_reg_lo, second_reg);
4133 __ shrd(first_reg_lo, temp_reg, second_reg);
4134 __ movl(temp_reg, first_reg_hi);
4135 __ testl(second_reg, Immediate(32));
4136 __ cmovl(kNotEqual, first_reg_hi, first_reg_lo);
4137 __ cmovl(kNotEqual, first_reg_lo, temp_reg);
4138 } else {
Roland Levillain5b5b9312016-03-22 14:57:31 +00004139 int32_t shift_amt = second.GetConstant()->AsIntConstant()->GetValue() & kMaxLongShiftDistance;
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004140 if (shift_amt == 0) {
4141 // Already fine.
4142 return;
4143 }
4144 if (shift_amt == 32) {
4145 // Just swap.
4146 __ movl(temp_reg, first_reg_lo);
4147 __ movl(first_reg_lo, first_reg_hi);
4148 __ movl(first_reg_hi, temp_reg);
4149 return;
4150 }
4151
4152 Immediate imm(shift_amt);
 4153    // Save the contents of the low value.
4154 __ movl(temp_reg, first_reg_lo);
4155
4156 // Shift right into low, feeding bits from high.
4157 __ shrd(first_reg_lo, first_reg_hi, imm);
4158
4159 // Shift right into high, feeding bits from the original low.
4160 __ shrd(first_reg_hi, temp_reg, imm);
4161
4162 // Swap if needed.
4163 if (shift_amt > 32) {
4164 __ movl(temp_reg, first_reg_lo);
4165 __ movl(first_reg_lo, first_reg_hi);
4166 __ movl(first_reg_hi, temp_reg);
4167 }
4168 }
4169}
4170
Calin Juravle9aec02f2014-11-18 23:06:35 +00004171void LocationsBuilderX86::VisitShl(HShl* shl) {
4172 HandleShift(shl);
4173}
4174
4175void InstructionCodeGeneratorX86::VisitShl(HShl* shl) {
4176 HandleShift(shl);
4177}
4178
4179void LocationsBuilderX86::VisitShr(HShr* shr) {
4180 HandleShift(shr);
4181}
4182
4183void InstructionCodeGeneratorX86::VisitShr(HShr* shr) {
4184 HandleShift(shr);
4185}
4186
4187void LocationsBuilderX86::VisitUShr(HUShr* ushr) {
4188 HandleShift(ushr);
4189}
4190
4191void InstructionCodeGeneratorX86::VisitUShr(HUShr* ushr) {
4192 HandleShift(ushr);
4193}
4194
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004195void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004196 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004197 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004198 locations->SetOut(Location::RegisterLocation(EAX));
David Brazdil6de19382016-01-08 17:37:10 +00004199 if (instruction->IsStringAlloc()) {
4200 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
4201 } else {
4202 InvokeRuntimeCallingConvention calling_convention;
4203 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00004204 }
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004205}
4206
4207void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004208  // Note: if heap poisoning is enabled, the entry point takes care
4209 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00004210 if (instruction->IsStringAlloc()) {
4211 // String is allocated through StringFactory. Call NewEmptyString entry point.
4212 Register temp = instruction->GetLocations()->GetTemp(0).AsRegister<Register>();
Andreas Gampe542451c2016-07-26 09:02:02 -07004213 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00004214 __ fs()->movl(temp, Address::Absolute(QUICK_ENTRY_POINT(pNewEmptyString)));
4215 __ call(Address(temp, code_offset.Int32Value()));
4216 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
4217 } else {
Serban Constantinescuba45db02016-07-12 22:53:02 +01004218 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +00004219 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
David Brazdil6de19382016-01-08 17:37:10 +00004220 DCHECK(!codegen_->IsLeafMethod());
4221 }
Nicolas Geoffray2e7038a2014-04-03 18:49:58 +01004222}
4223
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004224void LocationsBuilderX86::VisitNewArray(HNewArray* instruction) {
4225 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004226 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004227 locations->SetOut(Location::RegisterLocation(EAX));
4228 InvokeRuntimeCallingConvention calling_convention;
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004229 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4230 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004231}
4232
4233void InstructionCodeGeneratorX86::VisitNewArray(HNewArray* instruction) {
Roland Levillain4d027112015-07-01 15:41:14 +01004234  // Note: if heap poisoning is enabled, the entry point takes care
4235 // of poisoning the reference.
Nicolas Geoffrayd0958442017-01-30 14:57:16 +00004236 QuickEntrypointEnum entrypoint =
4237 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
4238 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00004239 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Nicolas Geoffraya3d05a42014-10-20 17:41:32 +01004240 DCHECK(!codegen_->IsLeafMethod());
4241}
4242
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004243void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004244 LocationSummary* locations =
4245 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Nicolas Geoffraya747a392014-04-17 14:56:23 +01004246 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
4247 if (location.IsStackSlot()) {
4248 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
4249 } else if (location.IsDoubleStackSlot()) {
4250 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004251 }
Nicolas Geoffraya747a392014-04-17 14:56:23 +01004252 locations->SetOut(location);
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004253}
4254
Nicolas Geoffray76b1e172015-05-27 17:18:33 +01004255void InstructionCodeGeneratorX86::VisitParameterValue(
4256 HParameterValue* instruction ATTRIBUTE_UNUSED) {
4257}
4258
4259void LocationsBuilderX86::VisitCurrentMethod(HCurrentMethod* instruction) {
4260 LocationSummary* locations =
4261 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4262 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
4263}
4264
4265void InstructionCodeGeneratorX86::VisitCurrentMethod(HCurrentMethod* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffrayf583e592014-04-07 13:20:42 +01004266}
4267
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004268void LocationsBuilderX86::VisitClassTableGet(HClassTableGet* instruction) {
4269 LocationSummary* locations =
4270 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4271 locations->SetInAt(0, Location::RequiresRegister());
4272 locations->SetOut(Location::RequiresRegister());
4273}
4274
4275void InstructionCodeGeneratorX86::VisitClassTableGet(HClassTableGet* instruction) {
4276 LocationSummary* locations = instruction->GetLocations();
Vladimir Markoa1de9182016-02-25 11:37:38 +00004277 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
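    // Load the ArtMethod* from the class's embedded vtable entry at the requested index.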
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004278 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004279 instruction->GetIndex(), kX86PointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004280 __ movl(locations->Out().AsRegister<Register>(),
4281 Address(locations->InAt(0).AsRegister<Register>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004282 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004283 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00004284 instruction->GetIndex(), kX86PointerSize));
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01004285 __ movl(locations->Out().AsRegister<Register>(),
4286 Address(locations->InAt(0).AsRegister<Register>(),
4287 mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value()));
4288    // out = out->GetImtEntryAt(method_offset);
4289 __ movl(locations->Out().AsRegister<Register>(),
4290 Address(locations->Out().AsRegister<Register>(), method_offset));
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004291 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00004292}
4293
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004294void LocationsBuilderX86::VisitNot(HNot* not_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004295 LocationSummary* locations =
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004296 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01004297 locations->SetInAt(0, Location::RequiresRegister());
4298 locations->SetOut(Location::SameAsFirstInput());
Nicolas Geoffrayb55f8352014-04-07 15:26:35 +01004299}
4300
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004301void InstructionCodeGeneratorX86::VisitNot(HNot* not_) {
4302 LocationSummary* locations = not_->GetLocations();
Roland Levillain70566432014-10-24 16:20:17 +01004303 Location in = locations->InAt(0);
Nicolas Geoffraya7aca372014-04-28 17:47:12 +01004304 Location out = locations->Out();
Roland Levillain70566432014-10-24 16:20:17 +01004305 DCHECK(in.Equals(out));
Nicolas Geoffrayd8ef2e92015-02-24 16:02:06 +00004306 switch (not_->GetResultType()) {
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004307 case Primitive::kPrimInt:
Roland Levillain271ab9c2014-11-27 15:23:57 +00004308 __ notl(out.AsRegister<Register>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004309 break;
4310
4311 case Primitive::kPrimLong:
Roland Levillain70566432014-10-24 16:20:17 +01004312 __ notl(out.AsRegisterPairLow<Register>());
4313 __ notl(out.AsRegisterPairHigh<Register>());
Roland Levillain1cc5f2512014-10-22 18:06:21 +01004314 break;
4315
4316 default:
4317 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
4318 }
Nicolas Geoffrayb55f8352014-04-07 15:26:35 +01004319}
4320
David Brazdil66d126e2015-04-03 16:02:44 +01004321void LocationsBuilderX86::VisitBooleanNot(HBooleanNot* bool_not) {
4322 LocationSummary* locations =
4323 new (GetGraph()->GetArena()) LocationSummary(bool_not, LocationSummary::kNoCall);
4324 locations->SetInAt(0, Location::RequiresRegister());
4325 locations->SetOut(Location::SameAsFirstInput());
4326}
4327
4328void InstructionCodeGeneratorX86::VisitBooleanNot(HBooleanNot* bool_not) {
David Brazdil66d126e2015-04-03 16:02:44 +01004329 LocationSummary* locations = bool_not->GetLocations();
4330 Location in = locations->InAt(0);
4331 Location out = locations->Out();
4332 DCHECK(in.Equals(out));
4333 __ xorl(out.AsRegister<Register>(), Immediate(1));
4334}
4335
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004336void LocationsBuilderX86::VisitCompare(HCompare* compare) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004337 LocationSummary* locations =
4338 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Calin Juravleddb7df22014-11-25 20:56:51 +00004339 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00004340 case Primitive::kPrimBoolean:
4341 case Primitive::kPrimByte:
4342 case Primitive::kPrimShort:
4343 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08004344 case Primitive::kPrimInt:
Calin Juravleddb7df22014-11-25 20:56:51 +00004345 case Primitive::kPrimLong: {
4346 locations->SetInAt(0, Location::RequiresRegister());
Calin Juravleddb7df22014-11-25 20:56:51 +00004347 locations->SetInAt(1, Location::Any());
4348 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4349 break;
4350 }
4351 case Primitive::kPrimFloat:
4352 case Primitive::kPrimDouble: {
4353 locations->SetInAt(0, Location::RequiresFpuRegister());
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004354 if (compare->InputAt(1)->IsX86LoadFromConstantTable()) {
4355 DCHECK(compare->InputAt(1)->IsEmittedAtUseSite());
4356 } else if (compare->InputAt(1)->IsConstant()) {
4357 locations->SetInAt(1, Location::RequiresFpuRegister());
4358 } else {
4359 locations->SetInAt(1, Location::Any());
4360 }
Calin Juravleddb7df22014-11-25 20:56:51 +00004361 locations->SetOut(Location::RequiresRegister());
4362 break;
4363 }
4364 default:
4365 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
4366 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004367}
4368
4369void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004370 LocationSummary* locations = compare->GetLocations();
Roland Levillain271ab9c2014-11-27 15:23:57 +00004371 Register out = locations->Out().AsRegister<Register>();
Calin Juravleddb7df22014-11-25 20:56:51 +00004372 Location left = locations->InAt(0);
4373 Location right = locations->InAt(1);
4374
Mark Mendell0c9497d2015-08-21 09:30:05 -04004375 NearLabel less, greater, done;
Aart Bika19616e2016-02-01 18:57:58 -08004376 Condition less_cond = kLess;
4377
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004378 switch (compare->InputAt(0)->GetType()) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00004379 case Primitive::kPrimBoolean:
4380 case Primitive::kPrimByte:
4381 case Primitive::kPrimShort:
4382 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08004383 case Primitive::kPrimInt: {
Roland Levillain0b671c02016-08-19 12:02:34 +01004384 codegen_->GenerateIntCompare(left, right);
Aart Bika19616e2016-02-01 18:57:58 -08004385 break;
4386 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004387 case Primitive::kPrimLong: {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004388 Register left_low = left.AsRegisterPairLow<Register>();
4389 Register left_high = left.AsRegisterPairHigh<Register>();
4390 int32_t val_low = 0;
4391 int32_t val_high = 0;
4392 bool right_is_const = false;
4393
4394 if (right.IsConstant()) {
4395 DCHECK(right.GetConstant()->IsLongConstant());
4396 right_is_const = true;
4397 int64_t val = right.GetConstant()->AsLongConstant()->GetValue();
4398 val_low = Low32Bits(val);
4399 val_high = High32Bits(val);
4400 }
4401
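      // Compare the high words first with signed semantics; only if they are equal does an
      // unsigned compare of the low words decide the result (hence less_cond = kBelow).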
Calin Juravleddb7df22014-11-25 20:56:51 +00004402 if (right.IsRegisterPair()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004403 __ cmpl(left_high, right.AsRegisterPairHigh<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004404 } else if (right.IsDoubleStackSlot()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004405 __ cmpl(left_high, Address(ESP, right.GetHighStackIndex(kX86WordSize)));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004406 } else {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004407 DCHECK(right_is_const) << right;
Aart Bika19616e2016-02-01 18:57:58 -08004408 codegen_->Compare32BitValue(left_high, val_high);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004409 }
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004410 __ j(kLess, &less); // Signed compare.
4411 __ j(kGreater, &greater); // Signed compare.
Nicolas Geoffray56b9ee62014-10-09 11:47:51 +01004412 if (right.IsRegisterPair()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004413 __ cmpl(left_low, right.AsRegisterPairLow<Register>());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004414 } else if (right.IsDoubleStackSlot()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004415 __ cmpl(left_low, Address(ESP, right.GetStackIndex()));
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004416 } else {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04004417 DCHECK(right_is_const) << right;
Aart Bika19616e2016-02-01 18:57:58 -08004418 codegen_->Compare32BitValue(left_low, val_low);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004419 }
Aart Bika19616e2016-02-01 18:57:58 -08004420 less_cond = kBelow; // for CF (unsigned).
Calin Juravleddb7df22014-11-25 20:56:51 +00004421 break;
4422 }
4423 case Primitive::kPrimFloat: {
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004424 GenerateFPCompare(left, right, compare, false);
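      // An unordered result (a NaN operand) counts as greater for gt-bias (cmpg) compares
      // and as less for lt-bias (cmpl) compares.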
Calin Juravleddb7df22014-11-25 20:56:51 +00004425 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08004426 less_cond = kBelow; // for CF (floats).
Calin Juravleddb7df22014-11-25 20:56:51 +00004427 break;
4428 }
4429 case Primitive::kPrimDouble: {
Mark P Mendell2f10a5f2016-01-25 14:47:50 +00004430 GenerateFPCompare(left, right, compare, true);
Calin Juravleddb7df22014-11-25 20:56:51 +00004431 __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
Aart Bika19616e2016-02-01 18:57:58 -08004432 less_cond = kBelow; // for CF (floats).
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004433 break;
4434 }
4435 default:
Calin Juravleddb7df22014-11-25 20:56:51 +00004436 LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004437 }
Aart Bika19616e2016-02-01 18:57:58 -08004438
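  // Materialize the result: 0 if equal, 1 if greater, -1 if less.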
Calin Juravleddb7df22014-11-25 20:56:51 +00004439 __ movl(out, Immediate(0));
4440 __ j(kEqual, &done);
Aart Bika19616e2016-02-01 18:57:58 -08004441 __ j(less_cond, &less);
Calin Juravleddb7df22014-11-25 20:56:51 +00004442
4443 __ Bind(&greater);
4444 __ movl(out, Immediate(1));
4445 __ jmp(&done);
4446
4447 __ Bind(&less);
4448 __ movl(out, Immediate(-1));
4449
4450 __ Bind(&done);
Nicolas Geoffray412f10c2014-06-19 10:00:34 +01004451}
4452
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004453void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01004454 LocationSummary* locations =
4455 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
Vladimir Marko372f10e2016-05-17 16:30:10 +01004456 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Nicolas Geoffray31d76b42014-06-09 15:02:22 +01004457 locations->SetInAt(i, Location::Any());
4458 }
4459 locations->SetOut(Location::Any());
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004460}
4461
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01004462void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01004463 LOG(FATAL) << "Unreachable";
Nicolas Geoffrayc32e7702014-04-24 12:43:16 +01004464}
4465
Roland Levillain7c1559a2015-12-15 10:55:36 +00004466void CodeGeneratorX86::GenerateMemoryBarrier(MemBarrierKind kind) {
Calin Juravle52c48962014-12-16 17:02:57 +00004467 /*
4468 * According to the JSR-133 Cookbook, for x86 only StoreLoad/AnyAny barriers need memory fence.
4469 * All other barriers (LoadAny, AnyStore, StoreStore) are nops due to the x86 memory model.
4470 * For those cases, all we need to ensure is that there is a scheduling barrier in place.
4471 */
4472 switch (kind) {
4473 case MemBarrierKind::kAnyAny: {
Mark P Mendell17077d82015-12-16 19:15:59 +00004474 MemoryFence();
Calin Juravle52c48962014-12-16 17:02:57 +00004475 break;
4476 }
4477 case MemBarrierKind::kAnyStore:
4478 case MemBarrierKind::kLoadAny:
4479 case MemBarrierKind::kStoreStore: {
4480 // nop
4481 break;
4482 }
Mark Mendell7aa04a12016-01-27 22:39:07 -05004483 case MemBarrierKind::kNTStoreStore:
4484 // Non-Temporal Store/Store needs an explicit fence.
4485 MemoryFence(/* non-temporal */ true);
4486 break;
Nicolas Geoffray1a43dd72014-07-17 15:15:34 +01004487 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004488}
4489
Vladimir Markodc151b22015-10-15 18:02:30 +01004490HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86::GetSupportedInvokeStaticOrDirectDispatch(
4491 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01004492 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004493 return desired_dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01004494}
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004495
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004496Register CodeGeneratorX86::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
4497 Register temp) {
4498 DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
Vladimir Markoc53c0792015-11-19 15:48:33 +00004499 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004500 if (!invoke->GetLocations()->Intrinsified()) {
4501 return location.AsRegister<Register>();
4502 }
4503 // For intrinsics we allow any location, so it may be on the stack.
4504 if (!location.IsRegister()) {
4505 __ movl(temp, Address(ESP, location.GetStackIndex()));
4506 return temp;
4507 }
4508 // For register locations, check if the register was saved. If so, get it from the stack.
4509 // Note: There is a chance that the register was saved but not overwritten, so we could
4510 // save one load. However, since this is just an intrinsic slow path, we prefer this
4511 // simple and more robust approach rather than trying to determine if that's the case.
4512 SlowPathCode* slow_path = GetCurrentSlowPath();
Serguei Katkov288c7a82016-05-16 11:53:15 +06004513 if (slow_path != nullptr) {
4514 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
4515 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
4516 __ movl(temp, Address(ESP, stack_offset));
4517 return temp;
4518 }
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004519 }
4520 return location.AsRegister<Register>();
4521}
4522
Serguei Katkov288c7a82016-05-16 11:53:15 +06004523Location CodeGeneratorX86::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
4524 Location temp) {
Vladimir Marko58155012015-08-19 12:49:41 +00004525 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
4526 switch (invoke->GetMethodLoadKind()) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004527 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +00004528 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004529 uint32_t offset =
4530 GetThreadOffset<kX86PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
4531 __ fs()->movl(temp.AsRegister<Register>(), Address::Absolute(offset));
Vladimir Marko58155012015-08-19 12:49:41 +00004532 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01004533 }
Vladimir Marko58155012015-08-19 12:49:41 +00004534 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00004535 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004536 break;
4537 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
4538 __ movl(temp.AsRegister<Register>(), Immediate(invoke->GetMethodAddress()));
4539 break;
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004540 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
4541 Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
4542 temp.AsRegister<Register>());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004543 __ movl(temp.AsRegister<Register>(), Address(base_reg, kDummy32BitOffset));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004544 // Bind a new fixup label at the end of the "movl" insn.
4545 uint32_t offset = invoke->GetDexCacheArrayOffset();
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004546 __ Bind(NewPcRelativeDexCacheArrayPatch(
4547 invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress(),
4548 invoke->GetDexFileForPcRelativeDexCache(),
4549 offset));
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004550 break;
4551 }
Vladimir Marko58155012015-08-19 12:49:41 +00004552 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00004553 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00004554 Register method_reg;
4555 Register reg = temp.AsRegister<Register>();
4556 if (current_method.IsRegister()) {
4557 method_reg = current_method.AsRegister<Register>();
4558 } else {
David Brazdil58282f42016-01-14 12:45:10 +00004559 DCHECK(invoke->GetLocations()->Intrinsified());
Vladimir Marko58155012015-08-19 12:49:41 +00004560 DCHECK(!current_method.IsValid());
4561 method_reg = reg;
4562 __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
4563 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00004564 // /* ArtMethod*[] */ temp = temp.ptr_sized_fields_->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01004565 __ movl(reg, Address(method_reg,
4566 ArtMethod::DexCacheResolvedMethodsOffset(kX86PointerSize).Int32Value()));
Vladimir Marko40ecb122016-04-06 17:33:41 +01004567 // temp = temp[index_in_cache];
4568 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
4569 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00004570 __ movl(reg, Address(reg, CodeGenerator::GetCachePointerOffset(index_in_cache)));
4571 break;
Vladimir Marko9b688a02015-05-06 14:12:42 +01004572 }
Vladimir Marko58155012015-08-19 12:49:41 +00004573 }
Serguei Katkov288c7a82016-05-16 11:53:15 +06004574 return callee_method;
4575}
4576
4577void CodeGeneratorX86::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
4578 Location callee_method = GenerateCalleeMethodStaticOrDirectCall(invoke, temp);
Vladimir Marko58155012015-08-19 12:49:41 +00004579
4580 switch (invoke->GetCodePtrLocation()) {
4581 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
4582 __ call(GetFrameEntryLabel());
4583 break;
Vladimir Marko58155012015-08-19 12:49:41 +00004584 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
4585 // (callee_method + offset_of_quick_compiled_code)()
4586 __ call(Address(callee_method.AsRegister<Register>(),
4587 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07004588 kX86PointerSize).Int32Value()));
Vladimir Marko58155012015-08-19 12:49:41 +00004589 break;
Mark Mendell09ed1a32015-03-25 08:30:06 -04004590 }
4591
4592 DCHECK(!IsLeafMethod());
Mark Mendell09ed1a32015-03-25 08:30:06 -04004593}
4594
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004595void CodeGeneratorX86::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_in) {
4596 Register temp = temp_in.AsRegister<Register>();
4597 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
4598 invoke->GetVTableIndex(), kX86PointerSize).Uint32Value();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004599
4600 // Use the calling convention instead of the location of the receiver, as
4601 // intrinsics may have put the receiver in a different register. In the intrinsics
4602 // slow path, the arguments have been moved to the right place, so here we are
4603 // guaranteed that the receiver is in the first register of the calling convention.
4604 InvokeDexCallingConvention calling_convention;
4605 Register receiver = calling_convention.GetRegisterAt(0);
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004606 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004607 // /* HeapReference<Class> */ temp = receiver->klass_
Nicolas Geoffraye5234232015-12-02 09:06:11 +00004608 __ movl(temp, Address(receiver, class_offset));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004609 MaybeRecordImplicitNullCheck(invoke);
Roland Levillain0d5a2812015-11-13 10:07:31 +00004610 // Instead of simply (possibly) unpoisoning `temp` here, we should
4611 // emit a read barrier for the previous class reference load.
4612 // However, this is not required in practice: this is an
4613 // intermediate/temporary reference, and the current
4614 // concurrent copying collector keeps the from-space memory
4615 // intact/accessible until the end of the marking phase (a
4616 // future collector may not).
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004617 __ MaybeUnpoisonHeapReference(temp);
4618 // temp = temp->GetMethodAt(method_offset);
4619 __ movl(temp, Address(temp, method_offset));
4620 // call temp->GetEntryPoint();
4621 __ call(Address(
Andreas Gampe542451c2016-07-26 09:02:02 -07004622 temp, ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86PointerSize).Int32Value()));
Andreas Gampebfb5ba92015-09-01 15:45:02 +00004623}
4624
Vladimir Markoaad75c62016-10-03 08:46:48 +00004625void CodeGeneratorX86::RecordBootStringPatch(HLoadString* load_string) {
4626 DCHECK(GetCompilerOptions().IsBootImage());
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004627 HX86ComputeBaseMethodAddress* address = nullptr;
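  // For PIC boot images the string reference is patched PC-relative to the method-address
  // base; non-PIC boot images use an absolute patch, so there is no base input.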
4628 if (GetCompilerOptions().GetCompilePic()) {
4629 address = load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
4630 } else {
4631 DCHECK_EQ(load_string->InputCount(), 0u);
4632 }
4633 string_patches_.emplace_back(address,
4634 load_string->GetDexFile(),
4635 load_string->GetStringIndex().index_);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004636 __ Bind(&string_patches_.back().label);
4637}
4638
Vladimir Marko1998cd02017-01-13 13:02:58 +00004639void CodeGeneratorX86::RecordBootTypePatch(HLoadClass* load_class) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004640 HX86ComputeBaseMethodAddress* address = nullptr;
4641 if (GetCompilerOptions().GetCompilePic()) {
4642 address = load_class->InputAt(0)->AsX86ComputeBaseMethodAddress();
4643 } else {
4644 DCHECK_EQ(load_class->InputCount(), 0u);
4645 }
4646 boot_image_type_patches_.emplace_back(address,
4647 load_class->GetDexFile(),
Vladimir Marko1998cd02017-01-13 13:02:58 +00004648 load_class->GetTypeIndex().index_);
4649 __ Bind(&boot_image_type_patches_.back().label);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004650}
4651
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004652Label* CodeGeneratorX86::NewTypeBssEntryPatch(HLoadClass* load_class) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004653 HX86ComputeBaseMethodAddress* address =
4654 load_class->InputAt(0)->AsX86ComputeBaseMethodAddress();
4655 type_bss_entry_patches_.emplace_back(
4656 address, load_class->GetDexFile(), load_class->GetTypeIndex().index_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00004657 return &type_bss_entry_patches_.back().label;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004658}
4659
Vladimir Markoaad75c62016-10-03 08:46:48 +00004660Label* CodeGeneratorX86::NewStringBssEntryPatch(HLoadString* load_string) {
4661 DCHECK(!GetCompilerOptions().IsBootImage());
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004662 HX86ComputeBaseMethodAddress* address =
4663 load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
4664 string_patches_.emplace_back(
4665 address, load_string->GetDexFile(), load_string->GetStringIndex().index_);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004666 return &string_patches_.back().label;
4667}
4668
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004669Label* CodeGeneratorX86::NewPcRelativeDexCacheArrayPatch(
4670 HX86ComputeBaseMethodAddress* method_address,
4671 const DexFile& dex_file,
4672 uint32_t element_offset) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004673 // Add the patch entry and bind its label at the end of the instruction.
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004674 pc_relative_dex_cache_patches_.emplace_back(method_address, dex_file, element_offset);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004675 return &pc_relative_dex_cache_patches_.back().label;
4676}
4677
Vladimir Markoaad75c62016-10-03 08:46:48 +00004678// The label points to the end of the "movl" (or other) instruction, but the literal offset
4679// for the patch needs to point to the embedded constant, which occupies the last 4 bytes.
4680constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
4681
4682template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
4683inline void CodeGeneratorX86::EmitPcRelativeLinkerPatches(
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004684 const ArenaDeque<X86PcRelativePatchInfo>& infos,
Vladimir Markoaad75c62016-10-03 08:46:48 +00004685 ArenaVector<LinkerPatch>* linker_patches) {
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004686 for (const X86PcRelativePatchInfo& info : infos) {
Vladimir Markoaad75c62016-10-03 08:46:48 +00004687 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Nicolas Geoffray133719e2017-01-22 15:44:39 +00004688 linker_patches->push_back(Factory(
4689 literal_offset, &info.dex_file, GetMethodAddressOffset(info.method_address), info.index));
Vladimir Markoaad75c62016-10-03 08:46:48 +00004690 }
4691}
4692
Vladimir Marko58155012015-08-19 12:49:41 +00004693void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
4694 DCHECK(linker_patches->empty());
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004695 size_t size =
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004696 pc_relative_dex_cache_patches_.size() +
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004697 string_patches_.size() +
Vladimir Marko1998cd02017-01-13 13:02:58 +00004698 boot_image_type_patches_.size() +
4699 type_bss_entry_patches_.size();
Vladimir Marko0f7dca42015-11-02 14:36:43 +00004700 linker_patches->reserve(size);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004701 EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
4702 linker_patches);
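  // String and boot image type references are patched differently depending on whether we
  // compile the boot image and whether PIC is enabled; type .bss entries are always PC-relative.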
Vladimir Markoaad75c62016-10-03 08:46:48 +00004703 if (!GetCompilerOptions().IsBootImage()) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00004704 DCHECK(boot_image_type_patches_.empty());
Vladimir Markoaad75c62016-10-03 08:46:48 +00004705 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
4706 } else if (GetCompilerOptions().GetCompilePic()) {
Vladimir Marko1998cd02017-01-13 13:02:58 +00004707 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(boot_image_type_patches_,
4708 linker_patches);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004709 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00004710 } else {
Vladimir Marko1998cd02017-01-13 13:02:58 +00004711 for (const PatchInfo<Label>& info : boot_image_type_patches_) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004712 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
Vladimir Markoaad75c62016-10-03 08:46:48 +00004713 linker_patches->push_back(LinkerPatch::TypePatch(literal_offset, &info.dex_file, info.index));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01004714 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00004715 for (const PatchInfo<Label>& info : string_patches_) {
4716 uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
4717 linker_patches->push_back(
4718 LinkerPatch::StringPatch(literal_offset, &info.dex_file, info.index));
4719 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00004720 }
Vladimir Marko1998cd02017-01-13 13:02:58 +00004721 EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
4722 linker_patches);
4723 DCHECK_EQ(size, linker_patches->size());
Vladimir Marko58155012015-08-19 12:49:41 +00004724}
4725
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004726void CodeGeneratorX86::MarkGCCard(Register temp,
4727 Register card,
4728 Register object,
4729 Register value,
4730 bool value_can_be_null) {
Mark Mendell0c9497d2015-08-21 09:30:05 -04004731 NearLabel is_null;
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004732 if (value_can_be_null) {
4733 __ testl(value, value);
4734 __ j(kEqual, &is_null);
4735 }
Andreas Gampe542451c2016-07-26 09:02:02 -07004736 __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86PointerSize>().Int32Value()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004737 __ movl(temp, object);
4738 __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
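  // Dirty the card covering `object`: `temp` now indexes the card table, whose biased base is
  // in `card`; the byte stored is the low byte of `card`, which the runtime arranges to equal
  // the dirty-card value.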
Nicolas Geoffray5b4b8982014-12-18 17:45:56 +00004739 __ movb(Address(temp, card, TIMES_1, 0),
4740 X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004741 if (value_can_be_null) {
4742 __ Bind(&is_null);
4743 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01004744}
4745
Calin Juravle52c48962014-12-16 17:02:57 +00004746void LocationsBuilderX86::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
4747 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Roland Levillain0d5a2812015-11-13 10:07:31 +00004748
4749 bool object_field_get_with_read_barrier =
4750 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01004751 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00004752 new (GetGraph()->GetArena()) LocationSummary(instruction,
4753 kEmitCompilerReadBarrier ?
4754 LocationSummary::kCallOnSlowPath :
4755 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01004756 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01004757 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01004758 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01004759 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004760
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004761 if (Primitive::IsFloatingPointType(instruction->GetType())) {
4762 locations->SetOut(Location::RequiresFpuRegister());
4763 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00004764 // The output overlaps in the case of long: we don't want the low move
4765 // to overwrite the object's location. Likewise, in the case of
4766 // an object field get with read barriers enabled, we do not want
4767 // the move to overwrite the object's location, as we need it to emit
4768 // the read barrier.
4769 locations->SetOut(
4770 Location::RequiresRegister(),
4771 (object_field_get_with_read_barrier || instruction->GetType() == Primitive::kPrimLong) ?
4772 Location::kOutputOverlap :
4773 Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004774 }
Calin Juravle52c48962014-12-16 17:02:57 +00004775
4776 if (field_info.IsVolatile() && (field_info.GetFieldType() == Primitive::kPrimLong)) {
4777 // Long values can be loaded atomically into an XMM using movsd.
Roland Levillain7c1559a2015-12-15 10:55:36 +00004778 // So we use an XMM register as a temp to achieve atomicity (first
4779 // load the value into the XMM temp and then copy the XMM into the
4780 // output, 32 bits at a time).
Calin Juravle52c48962014-12-16 17:02:57 +00004781 locations->AddTemp(Location::RequiresFpuRegister());
4782 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004783}
4784
Calin Juravle52c48962014-12-16 17:02:57 +00004785void InstructionCodeGeneratorX86::HandleFieldGet(HInstruction* instruction,
4786 const FieldInfo& field_info) {
4787 DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004788
Calin Juravle52c48962014-12-16 17:02:57 +00004789 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00004790 Location base_loc = locations->InAt(0);
4791 Register base = base_loc.AsRegister<Register>();
Calin Juravle52c48962014-12-16 17:02:57 +00004792 Location out = locations->Out();
4793 bool is_volatile = field_info.IsVolatile();
4794 Primitive::Type field_type = field_info.GetFieldType();
4795 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
4796
4797 switch (field_type) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004798 case Primitive::kPrimBoolean: {
Calin Juravle52c48962014-12-16 17:02:57 +00004799 __ movzxb(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004800 break;
4801 }
4802
4803 case Primitive::kPrimByte: {
Calin Juravle52c48962014-12-16 17:02:57 +00004804 __ movsxb(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004805 break;
4806 }
4807
4808 case Primitive::kPrimShort: {
Calin Juravle52c48962014-12-16 17:02:57 +00004809 __ movsxw(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004810 break;
4811 }
4812
4813 case Primitive::kPrimChar: {
Calin Juravle52c48962014-12-16 17:02:57 +00004814 __ movzxw(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004815 break;
4816 }
4817
4818 case Primitive::kPrimInt:
Calin Juravle52c48962014-12-16 17:02:57 +00004819 __ movl(out.AsRegister<Register>(), Address(base, offset));
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004820 break;
Roland Levillain7c1559a2015-12-15 10:55:36 +00004821
4822 case Primitive::kPrimNot: {
4823 // /* HeapReference<Object> */ out = *(base + offset)
4824 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00004825 // Note that a potential implicit null check is handled in this
4826 // CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier call.
4827 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00004828 instruction, out, base, offset, /* needs_null_check */ true);
Roland Levillain7c1559a2015-12-15 10:55:36 +00004829 if (is_volatile) {
4830 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4831 }
4832 } else {
4833 __ movl(out.AsRegister<Register>(), Address(base, offset));
4834 codegen_->MaybeRecordImplicitNullCheck(instruction);
4835 if (is_volatile) {
4836 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4837 }
4838 // If read barriers are enabled, emit read barriers other than
4839 // Baker's using a slow path (and also unpoison the loaded
4840 // reference, if heap poisoning is enabled).
4841 codegen_->MaybeGenerateReadBarrierSlow(instruction, out, out, base_loc, offset);
4842 }
4843 break;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004844 }
4845
4846 case Primitive::kPrimLong: {
Calin Juravle52c48962014-12-16 17:02:57 +00004847 if (is_volatile) {
4848 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
4849 __ movsd(temp, Address(base, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004850 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004851 __ movd(out.AsRegisterPairLow<Register>(), temp);
4852 __ psrlq(temp, Immediate(32));
4853 __ movd(out.AsRegisterPairHigh<Register>(), temp);
4854 } else {
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00004855 DCHECK_NE(base, out.AsRegisterPairLow<Register>());
Calin Juravle52c48962014-12-16 17:02:57 +00004856 __ movl(out.AsRegisterPairLow<Register>(), Address(base, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00004857 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00004858 __ movl(out.AsRegisterPairHigh<Register>(), Address(base, kX86WordSize + offset));
4859 }
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004860 break;
4861 }
4862
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004863 case Primitive::kPrimFloat: {
Calin Juravle52c48962014-12-16 17:02:57 +00004864 __ movss(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004865 break;
4866 }
4867
4868 case Primitive::kPrimDouble: {
Calin Juravle52c48962014-12-16 17:02:57 +00004869 __ movsd(out.AsFpuRegister<XmmRegister>(), Address(base, offset));
Nicolas Geoffray52e832b2014-11-06 15:15:31 +00004870 break;
4871 }
4872
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004873 case Primitive::kPrimVoid:
Calin Juravle52c48962014-12-16 17:02:57 +00004874 LOG(FATAL) << "Unreachable type " << field_type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07004875 UNREACHABLE();
Nicolas Geoffraye5038322014-07-04 09:41:32 +01004876 }
Calin Juravle52c48962014-12-16 17:02:57 +00004877
Roland Levillain7c1559a2015-12-15 10:55:36 +00004878 if (field_type == Primitive::kPrimNot || field_type == Primitive::kPrimLong) {
4879 // Potential implicit null checks, in the case of reference or
4880 // long fields, are handled in the previous switch statement.
4881 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00004882 codegen_->MaybeRecordImplicitNullCheck(instruction);
4883 }
4884
Calin Juravle52c48962014-12-16 17:02:57 +00004885 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00004886 if (field_type == Primitive::kPrimNot) {
4887 // Memory barriers, in the case of references, are also handled
4888 // in the previous switch statement.
4889 } else {
4890 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
4891 }
Roland Levillain4d027112015-07-01 15:41:14 +01004892 }
Calin Juravle52c48962014-12-16 17:02:57 +00004893}
4894
4895void LocationsBuilderX86::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
4896 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4897
4898 LocationSummary* locations =
4899 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
4900 locations->SetInAt(0, Location::RequiresRegister());
4901 bool is_volatile = field_info.IsVolatile();
4902 Primitive::Type field_type = field_info.GetFieldType();
4903 bool is_byte_type = (field_type == Primitive::kPrimBoolean)
4904 || (field_type == Primitive::kPrimByte);
4905
4906 // The register allocator does not support multiple
4907 // inputs that die at entry when one of them must be in a specific register.
4908 if (is_byte_type) {
4909 // Ensure the value is in a byte register.
4910 locations->SetInAt(1, Location::RegisterLocation(EAX));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01004911 } else if (Primitive::IsFloatingPointType(field_type)) {
Mark Mendell81489372015-11-04 11:30:41 -05004912 if (is_volatile && field_type == Primitive::kPrimDouble) {
4913 // In order to satisfy the semantics of volatile, this must be a single instruction store.
4914 locations->SetInAt(1, Location::RequiresFpuRegister());
4915 } else {
4916 locations->SetInAt(1, Location::FpuRegisterOrConstant(instruction->InputAt(1)));
4917 }
4918 } else if (is_volatile && field_type == Primitive::kPrimLong) {
4919 // In order to satisfy the semantics of volatile, this must be a single instruction store.
Calin Juravle52c48962014-12-16 17:02:57 +00004920 locations->SetInAt(1, Location::RequiresRegister());
Mark Mendell81489372015-11-04 11:30:41 -05004921
Calin Juravle52c48962014-12-16 17:02:57 +00004922 // A 64-bit value can be atomically written to an address with movsd and an XMM register.
4923 // We need two XMM registers because there's no easier way to (bit) copy a register pair
4924 // into a single XMM register (we copy each pair part into the XMMs and then interleave them).
4925 // NB: We could make the register allocator understand fp_reg <-> core_reg moves but given the
4926 // isolated cases when we need this it isn't worth adding the extra complexity.
4927 locations->AddTemp(Location::RequiresFpuRegister());
4928 locations->AddTemp(Location::RequiresFpuRegister());
Mark Mendell81489372015-11-04 11:30:41 -05004929 } else {
4930 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
4931
4932 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
4933 // Temporary registers for the write barrier.
4934 locations->AddTemp(Location::RequiresRegister()); // May be used for reference poisoning too.
4935 // Ensure the card is in a byte register.
4936 locations->AddTemp(Location::RegisterLocation(ECX));
4937 }
Calin Juravle52c48962014-12-16 17:02:57 +00004938 }
4939}
4940
4941void InstructionCodeGeneratorX86::HandleFieldSet(HInstruction* instruction,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01004942 const FieldInfo& field_info,
4943 bool value_can_be_null) {
Calin Juravle52c48962014-12-16 17:02:57 +00004944 DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());
4945
4946 LocationSummary* locations = instruction->GetLocations();
4947 Register base = locations->InAt(0).AsRegister<Register>();
4948 Location value = locations->InAt(1);
4949 bool is_volatile = field_info.IsVolatile();
4950 Primitive::Type field_type = field_info.GetFieldType();
4951 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Roland Levillain4d027112015-07-01 15:41:14 +01004952 bool needs_write_barrier =
4953 CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
Calin Juravle52c48962014-12-16 17:02:57 +00004954
4955 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00004956 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
Calin Juravle52c48962014-12-16 17:02:57 +00004957 }
4958
Mark Mendell81489372015-11-04 11:30:41 -05004959 bool maybe_record_implicit_null_check_done = false;
4960
Calin Juravle52c48962014-12-16 17:02:57 +00004961 switch (field_type) {
4962 case Primitive::kPrimBoolean:
4963 case Primitive::kPrimByte: {
4964 __ movb(Address(base, offset), value.AsRegister<ByteRegister>());
4965 break;
4966 }
4967
4968 case Primitive::kPrimShort:
4969 case Primitive::kPrimChar: {
Mark Mendell81489372015-11-04 11:30:41 -05004970 if (value.IsConstant()) {
4971 int16_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4972 __ movw(Address(base, offset), Immediate(v));
4973 } else {
4974 __ movw(Address(base, offset), value.AsRegister<Register>());
4975 }
Calin Juravle52c48962014-12-16 17:02:57 +00004976 break;
4977 }
4978
4979 case Primitive::kPrimInt:
4980 case Primitive::kPrimNot: {
Roland Levillain4d027112015-07-01 15:41:14 +01004981 if (kPoisonHeapReferences && needs_write_barrier) {
4982 // Note that in the case where `value` is a null reference,
4983 // we do not enter this block, as the reference does not
4984 // need poisoning.
4985 DCHECK_EQ(field_type, Primitive::kPrimNot);
4986 Register temp = locations->GetTemp(0).AsRegister<Register>();
4987 __ movl(temp, value.AsRegister<Register>());
4988 __ PoisonHeapReference(temp);
4989 __ movl(Address(base, offset), temp);
Mark Mendell81489372015-11-04 11:30:41 -05004990 } else if (value.IsConstant()) {
4991 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
4992 __ movl(Address(base, offset), Immediate(v));
Roland Levillain4d027112015-07-01 15:41:14 +01004993 } else {
Nicolas Geoffray03971632016-03-17 10:44:24 +00004994 DCHECK(value.IsRegister()) << value;
Roland Levillain4d027112015-07-01 15:41:14 +01004995 __ movl(Address(base, offset), value.AsRegister<Register>());
4996 }
Calin Juravle52c48962014-12-16 17:02:57 +00004997 break;
4998 }
4999
5000 case Primitive::kPrimLong: {
5001 if (is_volatile) {
5002 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
5003 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
5004 __ movd(temp1, value.AsRegisterPairLow<Register>());
5005 __ movd(temp2, value.AsRegisterPairHigh<Register>());
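        // punpckldq interleaves the low dwords, so temp1 now holds the full 64-bit value
        // (low half in bits 0-31, high half in bits 32-63), ready for a single atomic movsd.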
5006 __ punpckldq(temp1, temp2);
5007 __ movsd(Address(base, offset), temp1);
Calin Juravle77520bc2015-01-12 18:45:46 +00005008 codegen_->MaybeRecordImplicitNullCheck(instruction);
Mark Mendell81489372015-11-04 11:30:41 -05005009 } else if (value.IsConstant()) {
5010 int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
5011 __ movl(Address(base, offset), Immediate(Low32Bits(v)));
5012 codegen_->MaybeRecordImplicitNullCheck(instruction);
5013 __ movl(Address(base, kX86WordSize + offset), Immediate(High32Bits(v)));
Calin Juravle52c48962014-12-16 17:02:57 +00005014 } else {
5015 __ movl(Address(base, offset), value.AsRegisterPairLow<Register>());
Calin Juravle77520bc2015-01-12 18:45:46 +00005016 codegen_->MaybeRecordImplicitNullCheck(instruction);
Calin Juravle52c48962014-12-16 17:02:57 +00005017 __ movl(Address(base, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
5018 }
Mark Mendell81489372015-11-04 11:30:41 -05005019 maybe_record_implicit_null_check_done = true;
Calin Juravle52c48962014-12-16 17:02:57 +00005020 break;
5021 }
5022
5023 case Primitive::kPrimFloat: {
Mark Mendell81489372015-11-04 11:30:41 -05005024 if (value.IsConstant()) {
5025 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5026 __ movl(Address(base, offset), Immediate(v));
5027 } else {
5028 __ movss(Address(base, offset), value.AsFpuRegister<XmmRegister>());
5029 }
Calin Juravle52c48962014-12-16 17:02:57 +00005030 break;
5031 }
5032
5033 case Primitive::kPrimDouble: {
Mark Mendell81489372015-11-04 11:30:41 -05005034 if (value.IsConstant()) {
5035 int64_t v = CodeGenerator::GetInt64ValueOf(value.GetConstant());
5036 __ movl(Address(base, offset), Immediate(Low32Bits(v)));
5037 codegen_->MaybeRecordImplicitNullCheck(instruction);
5038 __ movl(Address(base, kX86WordSize + offset), Immediate(High32Bits(v)));
5039 maybe_record_implicit_null_check_done = true;
5040 } else {
5041 __ movsd(Address(base, offset), value.AsFpuRegister<XmmRegister>());
5042 }
Calin Juravle52c48962014-12-16 17:02:57 +00005043 break;
5044 }
5045
5046 case Primitive::kPrimVoid:
5047 LOG(FATAL) << "Unreachable type " << field_type;
5048 UNREACHABLE();
5049 }
5050
Mark Mendell81489372015-11-04 11:30:41 -05005051 if (!maybe_record_implicit_null_check_done) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005052 codegen_->MaybeRecordImplicitNullCheck(instruction);
5053 }
5054
Roland Levillain4d027112015-07-01 15:41:14 +01005055 if (needs_write_barrier) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005056 Register temp = locations->GetTemp(0).AsRegister<Register>();
5057 Register card = locations->GetTemp(1).AsRegister<Register>();
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005058 codegen_->MarkGCCard(temp, card, base, value.AsRegister<Register>(), value_can_be_null);
Calin Juravle77520bc2015-01-12 18:45:46 +00005059 }
5060
Calin Juravle52c48962014-12-16 17:02:57 +00005061 if (is_volatile) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005062 codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
Calin Juravle52c48962014-12-16 17:02:57 +00005063 }
5064}
5065
5066void LocationsBuilderX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5067 HandleFieldGet(instruction, instruction->GetFieldInfo());
5068}
5069
5070void InstructionCodeGeneratorX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
5071 HandleFieldGet(instruction, instruction->GetFieldInfo());
5072}
5073
5074void LocationsBuilderX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
5075 HandleFieldSet(instruction, instruction->GetFieldInfo());
5076}
5077
5078void InstructionCodeGeneratorX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005079 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005080}
5081
5082void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
5083 HandleFieldSet(instruction, instruction->GetFieldInfo());
5084}
5085
5086void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01005087 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Calin Juravle52c48962014-12-16 17:02:57 +00005088}
5089
5090void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5091 HandleFieldGet(instruction, instruction->GetFieldInfo());
5092}
5093
5094void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5095 HandleFieldGet(instruction, instruction->GetFieldInfo());
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005096}
5097
Calin Juravlee460d1d2015-09-29 04:52:17 +01005098void LocationsBuilderX86::VisitUnresolvedInstanceFieldGet(
5099 HUnresolvedInstanceFieldGet* instruction) {
5100 FieldAccessCallingConventionX86 calling_convention;
5101 codegen_->CreateUnresolvedFieldLocationSummary(
5102 instruction, instruction->GetFieldType(), calling_convention);
5103}
5104
5105void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldGet(
5106 HUnresolvedInstanceFieldGet* instruction) {
5107 FieldAccessCallingConventionX86 calling_convention;
5108 codegen_->GenerateUnresolvedFieldAccess(instruction,
5109 instruction->GetFieldType(),
5110 instruction->GetFieldIndex(),
5111 instruction->GetDexPc(),
5112 calling_convention);
5113}
5114
5115void LocationsBuilderX86::VisitUnresolvedInstanceFieldSet(
5116 HUnresolvedInstanceFieldSet* instruction) {
5117 FieldAccessCallingConventionX86 calling_convention;
5118 codegen_->CreateUnresolvedFieldLocationSummary(
5119 instruction, instruction->GetFieldType(), calling_convention);
5120}
5121
5122void InstructionCodeGeneratorX86::VisitUnresolvedInstanceFieldSet(
5123 HUnresolvedInstanceFieldSet* instruction) {
5124 FieldAccessCallingConventionX86 calling_convention;
5125 codegen_->GenerateUnresolvedFieldAccess(instruction,
5126 instruction->GetFieldType(),
5127 instruction->GetFieldIndex(),
5128 instruction->GetDexPc(),
5129 calling_convention);
5130}
5131
5132void LocationsBuilderX86::VisitUnresolvedStaticFieldGet(
5133 HUnresolvedStaticFieldGet* instruction) {
5134 FieldAccessCallingConventionX86 calling_convention;
5135 codegen_->CreateUnresolvedFieldLocationSummary(
5136 instruction, instruction->GetFieldType(), calling_convention);
5137}
5138
5139void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldGet(
5140 HUnresolvedStaticFieldGet* instruction) {
5141 FieldAccessCallingConventionX86 calling_convention;
5142 codegen_->GenerateUnresolvedFieldAccess(instruction,
5143 instruction->GetFieldType(),
5144 instruction->GetFieldIndex(),
5145 instruction->GetDexPc(),
5146 calling_convention);
5147}
5148
5149void LocationsBuilderX86::VisitUnresolvedStaticFieldSet(
5150 HUnresolvedStaticFieldSet* instruction) {
5151 FieldAccessCallingConventionX86 calling_convention;
5152 codegen_->CreateUnresolvedFieldLocationSummary(
5153 instruction, instruction->GetFieldType(), calling_convention);
5154}
5155
5156void InstructionCodeGeneratorX86::VisitUnresolvedStaticFieldSet(
5157 HUnresolvedStaticFieldSet* instruction) {
5158 FieldAccessCallingConventionX86 calling_convention;
5159 codegen_->GenerateUnresolvedFieldAccess(instruction,
5160 instruction->GetFieldType(),
5161 instruction->GetFieldIndex(),
5162 instruction->GetDexPc(),
5163 calling_convention);
5164}
5165
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005166void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005167 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
5168 Location loc = codegen_->GetCompilerOptions().GetImplicitNullChecks()
5169 ? Location::RequiresRegister()
5170 : Location::Any();
5171 locations->SetInAt(0, loc);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005172}
5173
Calin Juravle2ae48182016-03-16 14:05:09 +00005174void CodeGeneratorX86::GenerateImplicitNullCheck(HNullCheck* instruction) {
5175 if (CanMoveNullCheckToUser(instruction)) {
Calin Juravle77520bc2015-01-12 18:45:46 +00005176 return;
5177 }
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005178 LocationSummary* locations = instruction->GetLocations();
5179 Location obj = locations->InAt(0);
Calin Juravle77520bc2015-01-12 18:45:46 +00005180
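  // Touch the object's memory: if `obj` is null this load faults, and the fault handler turns
  // the SIGSEGV into a NullPointerException at the PC recorded below. testl only sets flags,
  // so EAX is not modified.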
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005181 __ testl(EAX, Address(obj.AsRegister<Register>(), 0));
Calin Juravle2ae48182016-03-16 14:05:09 +00005182 RecordPcInfo(instruction, instruction->GetDexPc());
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005183}
5184
Calin Juravle2ae48182016-03-16 14:05:09 +00005185void CodeGeneratorX86::GenerateExplicitNullCheck(HNullCheck* instruction) {
Andreas Gampe85b62f22015-09-09 13:15:38 -07005186 SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00005187 AddSlowPath(slow_path);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005188
5189 LocationSummary* locations = instruction->GetLocations();
5190 Location obj = locations->InAt(0);
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005191
5192 if (obj.IsRegister()) {
Mark Mendell42514f62015-03-31 11:34:22 -04005193 __ testl(obj.AsRegister<Register>(), obj.AsRegister<Register>());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005194 } else if (obj.IsStackSlot()) {
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005195 __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005196 } else {
5197 DCHECK(obj.IsConstant()) << obj;
David Brazdil77a48ae2015-09-15 12:34:04 +00005198 DCHECK(obj.GetConstant()->IsNullConstant());
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005199 __ jmp(slow_path->GetEntryLabel());
5200 return;
Nicolas Geoffraye5038322014-07-04 09:41:32 +01005201 }
5202 __ j(kEqual, slow_path->GetEntryLabel());
5203}
5204
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005205void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00005206 codegen_->GenerateNullCheck(instruction);
Calin Juravlecd6dffe2015-01-08 17:35:35 +00005207}
5208
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005209void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005210 bool object_array_get_with_read_barrier =
5211 kEmitCompilerReadBarrier && (instruction->GetType() == Primitive::kPrimNot);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005212 LocationSummary* locations =
Roland Levillain0d5a2812015-11-13 10:07:31 +00005213 new (GetGraph()->GetArena()) LocationSummary(instruction,
5214 object_array_get_with_read_barrier ?
5215 LocationSummary::kCallOnSlowPath :
5216 LocationSummary::kNoCall);
Vladimir Marko70e97462016-08-09 11:04:26 +01005217 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005218 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01005219 }
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005220 locations->SetInAt(0, Location::RequiresRegister());
5221 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005222 if (Primitive::IsFloatingPointType(instruction->GetType())) {
5223 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
5224 } else {
Roland Levillain0d5a2812015-11-13 10:07:31 +00005225 // The output overlaps in the case of long: we don't want the low move
5226 // to overwrite the array's location. Likewise, in the case of an
5227 // object array get with read barriers enabled, we do not want the
5228 // move to overwrite the array's location, as we need it to emit
5229 // the read barrier.
5230 locations->SetOut(
5231 Location::RequiresRegister(),
5232 (instruction->GetType() == Primitive::kPrimLong || object_array_get_with_read_barrier) ?
5233 Location::kOutputOverlap :
5234 Location::kNoOutputOverlap);
Alexandre Rames88c13cd2015-04-14 17:35:39 +01005235 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005236}
5237
5238void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
5239 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005240 Location obj_loc = locations->InAt(0);
5241 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005242 Location index = locations->InAt(1);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005243 Location out_loc = locations->Out();
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01005244 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005245
Calin Juravle77520bc2015-01-12 18:45:46 +00005246 Primitive::Type type = instruction->GetType();
5247 switch (type) {
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005248 case Primitive::kPrimBoolean: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005249 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005250 __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005251 break;
5252 }
5253
5254 case Primitive::kPrimByte: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005255 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005256 __ movsxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005257 break;
5258 }
5259
5260 case Primitive::kPrimShort: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005261 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005262 __ movsxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005263 break;
5264 }
5265
5266 case Primitive::kPrimChar: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005267 Register out = out_loc.AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07005268 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
5269 // Branch between the compressed and uncompressed cases based on the compression flag.
5270 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
5271 NearLabel done, not_compressed;
Vladimir Marko3c89d422017-02-17 11:30:23 +00005272 __ testb(Address(obj, count_offset), Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005273 codegen_->MaybeRecordImplicitNullCheck(instruction);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005274 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
5275 "Expecting 0=compressed, 1=uncompressed");
5276 __ j(kNotZero, &not_compressed);
jessicahandojo4877b792016-09-08 19:49:13 -07005277 __ movzxb(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_1, data_offset));
5278 __ jmp(&done);
5279 __ Bind(&not_compressed);
5280 __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5281 __ Bind(&done);
5282 } else {
5283 // Common case: an array of char, or a String charAt when the string compression
5284 // feature is turned off.
5285 __ movzxw(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_2, data_offset));
5286 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005287 break;
5288 }
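      // Editorial sketch, added for clarity (not part of the original ART source): with
      // string compression enabled, String.count packs the length and a compression flag,
      // roughly count = (length << 1) | flag, where flag 0 means 8-bit (compressed) chars
      // and flag 1 means 16-bit (uncompressed) chars. The branch above is therefore
      // equivalent to:
      //   if ((str->count & 1) == 0) {
      //     out = ZeroExtend8(compressed_chars[index]);     // one byte per character
      //   } else {
      //     out = ZeroExtend16(uncompressed_chars[index]);  // one uint16_t per character
      //   }
      // where compressed_chars/uncompressed_chars are hypothetical views of the value array.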
5289
Roland Levillain7c1559a2015-12-15 10:55:36 +00005290 case Primitive::kPrimInt: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005291 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005292 __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005293 break;
5294 }
5295
Roland Levillain7c1559a2015-12-15 10:55:36 +00005296 case Primitive::kPrimNot: {
5297 static_assert(
5298 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
5299 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00005300 // /* HeapReference<Object> */ out =
5301 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
5302 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005303 // Note that a potential implicit null check is handled in this
5304 // CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier call.
5305 codegen_->GenerateArrayLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00005306 instruction, out_loc, obj, data_offset, index, /* needs_null_check */ true);
Roland Levillain7c1559a2015-12-15 10:55:36 +00005307 } else {
5308 Register out = out_loc.AsRegister<Register>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005309 __ movl(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
5310 codegen_->MaybeRecordImplicitNullCheck(instruction);
5311 // If read barriers are enabled, emit read barriers other than
5312 // Baker's using a slow path (and also unpoison the loaded
5313 // reference, if heap poisoning is enabled).
Roland Levillain7c1559a2015-12-15 10:55:36 +00005314 if (index.IsConstant()) {
5315 uint32_t offset =
5316 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Roland Levillain7c1559a2015-12-15 10:55:36 +00005317 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
5318 } else {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005319 codegen_->MaybeGenerateReadBarrierSlow(
5320 instruction, out_loc, out_loc, obj_loc, data_offset, index);
5321 }
5322 }
5323 break;
5324 }
5325
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005326 case Primitive::kPrimLong: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005327 DCHECK_NE(obj, out_loc.AsRegisterPairLow<Register>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005328 __ movl(out_loc.AsRegisterPairLow<Register>(),
5329 CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset));
5330 codegen_->MaybeRecordImplicitNullCheck(instruction);
5331 __ movl(out_loc.AsRegisterPairHigh<Register>(),
5332 CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset + kX86WordSize));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005333 break;
5334 }
5335
Mark Mendell7c8d0092015-01-26 11:21:33 -05005336 case Primitive::kPrimFloat: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005337 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005338 __ movss(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005339 break;
5340 }
5341
5342 case Primitive::kPrimDouble: {
Roland Levillain7c1559a2015-12-15 10:55:36 +00005343 XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005344 __ movsd(out, CodeGeneratorX86::ArrayAddress(obj, index, TIMES_8, data_offset));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005345 break;
5346 }
5347
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005348 case Primitive::kPrimVoid:
Calin Juravle77520bc2015-01-12 18:45:46 +00005349 LOG(FATAL) << "Unreachable type " << type;
Ian Rogersfc787ec2014-10-09 21:56:44 -07005350 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005351 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005352
Roland Levillain7c1559a2015-12-15 10:55:36 +00005353 if (type == Primitive::kPrimNot || type == Primitive::kPrimLong) {
5354 // Potential implicit null checks, in the case of reference or
5355 // long arrays, are handled in the previous switch statement.
5356 } else {
Calin Juravle77520bc2015-01-12 18:45:46 +00005357 codegen_->MaybeRecordImplicitNullCheck(instruction);
5358 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005359}
5360
5361void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
Nicolas Geoffray39468442014-09-02 15:17:15 +01005362 Primitive::Type value_type = instruction->GetComponentType();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005363
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005364 bool needs_write_barrier =
5365 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Roland Levillain0d5a2812015-11-13 10:07:31 +00005366 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005367
Nicolas Geoffray39468442014-09-02 15:17:15 +01005368 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
5369 instruction,
Vladimir Marko8d49fd72016-08-25 15:20:47 +01005370 may_need_runtime_call_for_type_check ?
Roland Levillain0d5a2812015-11-13 10:07:31 +00005371 LocationSummary::kCallOnSlowPath :
5372 LocationSummary::kNoCall);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005373
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005374 bool is_byte_type = (value_type == Primitive::kPrimBoolean)
5375 || (value_type == Primitive::kPrimByte);
5376 // We need the inputs to be different from the output in the case of a long operation.
5377 // For a byte operation, the register allocator does not support multiple
5378 // inputs that die at entry when one of them must be in a specific register.
5379 locations->SetInAt(0, Location::RequiresRegister());
5380 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
5381 if (is_byte_type) {
5382 // Ensure the value is in a byte register.
5383 locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, instruction->InputAt(2)));
5384 } else if (Primitive::IsFloatingPointType(value_type)) {
Mark Mendell81489372015-11-04 11:30:41 -05005385 locations->SetInAt(2, Location::FpuRegisterOrConstant(instruction->InputAt(2)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005386 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005387 locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
5388 }
5389 if (needs_write_barrier) {
5390 // Temporary registers for the write barrier.
5391 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
5392 // Ensure the card is in a byte register.
Roland Levillain4f6b0b52015-11-23 19:29:22 +00005393 locations->AddTemp(Location::RegisterLocation(ECX));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005394 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005395}
5396
5397void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
5398 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005399 Location array_loc = locations->InAt(0);
5400 Register array = array_loc.AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005401 Location index = locations->InAt(1);
Nicolas Geoffray26a25ef2014-09-30 13:54:09 +01005402 Location value = locations->InAt(2);
Nicolas Geoffray39468442014-09-02 15:17:15 +01005403 Primitive::Type value_type = instruction->GetComponentType();
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005404 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5405 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5406 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005407 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005408 bool needs_write_barrier =
5409 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005410
5411 switch (value_type) {
5412 case Primitive::kPrimBoolean:
5413 case Primitive::kPrimByte: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005414 uint32_t offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005415 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_1, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005416 if (value.IsRegister()) {
5417 __ movb(address, value.AsRegister<ByteRegister>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005418 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005419 __ movb(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005420 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005421 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005422 break;
5423 }
5424
5425 case Primitive::kPrimShort:
5426 case Primitive::kPrimChar: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005427 uint32_t offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005428 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_2, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005429 if (value.IsRegister()) {
5430 __ movw(address, value.AsRegister<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005431 } else {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005432 __ movw(address, Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005433 }
Calin Juravle77520bc2015-01-12 18:45:46 +00005434 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005435 break;
5436 }
5437
Nicolas Geoffrayaf07bc12014-11-12 18:08:09 +00005438 case Primitive::kPrimNot: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005439 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005440 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005441
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005442 if (!value.IsRegister()) {
5443 // Just setting null.
5444 DCHECK(instruction->InputAt(2)->IsNullConstant());
5445 DCHECK(value.IsConstant()) << value;
5446 __ movl(address, Immediate(0));
Calin Juravle77520bc2015-01-12 18:45:46 +00005447 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005448 DCHECK(!needs_write_barrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00005449 DCHECK(!may_need_runtime_call_for_type_check);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005450 break;
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005451 }
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005452
5453 DCHECK(needs_write_barrier);
5454 Register register_value = value.AsRegister<Register>();
Roland Levillain16d9f942016-08-25 17:27:56 +01005455 // We cannot use a NearLabel for `done`, as its range may be too
5456 // short when Baker read barriers are enabled.
5457 Label done;
5458 NearLabel not_null, do_put;
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005459 SlowPathCode* slow_path = nullptr;
Roland Levillain16d9f942016-08-25 17:27:56 +01005460 Location temp_loc = locations->GetTemp(0);
5461 Register temp = temp_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00005462 if (may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005463 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathX86(instruction);
5464 codegen_->AddSlowPath(slow_path);
5465 if (instruction->GetValueCanBeNull()) {
5466 __ testl(register_value, register_value);
5467 __ j(kNotEqual, &not_null);
5468 __ movl(address, Immediate(0));
5469 codegen_->MaybeRecordImplicitNullCheck(instruction);
5470 __ jmp(&done);
5471 __ Bind(&not_null);
5472 }
5473
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005474 // Note that when Baker read barriers are enabled, the type
5475 // checks are performed without read barriers. This is fine,
5476 // even in the case where a class object is in the from-space
5477 // after the flip, as a comparison involving such a type would
5478 // not produce a false positive; it may of course produce a
5479 // false negative, in which case we would take the ArraySet
5480 // slow path.
Roland Levillain16d9f942016-08-25 17:27:56 +01005481
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005482 // /* HeapReference<Class> */ temp = array->klass_
5483 __ movl(temp, Address(array, class_offset));
5484 codegen_->MaybeRecordImplicitNullCheck(instruction);
5485 __ MaybeUnpoisonHeapReference(temp);
Roland Levillain16d9f942016-08-25 17:27:56 +01005486
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005487 // /* HeapReference<Class> */ temp = temp->component_type_
5488 __ movl(temp, Address(temp, component_offset));
5489 // If heap poisoning is enabled, no need to unpoison `temp`
5490 // nor the object reference in `register_value->klass`, as
5491 // we are comparing two poisoned references.
5492 __ cmpl(temp, Address(register_value, class_offset));
Roland Levillain16d9f942016-08-25 17:27:56 +01005493
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005494 if (instruction->StaticTypeOfArrayIsObjectArray()) {
5495 __ j(kEqual, &do_put);
5496 // If heap poisoning is enabled, the `temp` reference has
5497 // not been unpoisoned yet; unpoison it now.
Roland Levillain0d5a2812015-11-13 10:07:31 +00005498 __ MaybeUnpoisonHeapReference(temp);
5499
Roland Levillain9d6e1f82016-09-05 15:57:33 +01005500 // If heap poisoning is enabled, no need to unpoison the
5501 // heap reference loaded below, as it is only used for a
5502 // comparison with null.
5503 __ cmpl(Address(temp, super_offset), Immediate(0));
5504 __ j(kNotEqual, slow_path->GetEntryLabel());
5505 __ Bind(&do_put);
5506 } else {
5507 __ j(kNotEqual, slow_path->GetEntryLabel());
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005508 }
5509 }
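      // Editorial sketch, added for clarity (not part of the original ART source): the
      // type check emitted above corresponds roughly to the following pseudo-code, with
      // the slow path performing the full ArrayStoreException check in the runtime:
      //   if (value != null) {
      //     Class* component = array->klass_->component_type_;
      //     if (component != value->klass_) {
      //       // A null super class means component == java.lang.Object, so any
      //       // reference may be stored without calling into the runtime.
      //       if (!static_type_is_object_array || component->super_class_ != null) {
      //         goto slow_path;
      //       }
      //     }
      //   }
      //   // fall through to the actual store (and card marking) below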
5510
5511 if (kPoisonHeapReferences) {
5512 __ movl(temp, register_value);
5513 __ PoisonHeapReference(temp);
5514 __ movl(address, temp);
5515 } else {
5516 __ movl(address, register_value);
5517 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005518 if (!may_need_runtime_call_for_type_check) {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005519 codegen_->MaybeRecordImplicitNullCheck(instruction);
5520 }
5521
5522 Register card = locations->GetTemp(1).AsRegister<Register>();
5523 codegen_->MarkGCCard(
5524 temp, card, array, value.AsRegister<Register>(), instruction->GetValueCanBeNull());
5525 __ Bind(&done);
5526
5527 if (slow_path != nullptr) {
5528 __ Bind(slow_path->GetExitLabel());
5529 }
5530
5531 break;
5532 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00005533
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005534 case Primitive::kPrimInt: {
5535 uint32_t offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005536 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005537 if (value.IsRegister()) {
5538 __ movl(address, value.AsRegister<Register>());
5539 } else {
5540 DCHECK(value.IsConstant()) << value;
5541 int32_t v = CodeGenerator::GetInt32ValueOf(value.GetConstant());
5542 __ movl(address, Immediate(v));
5543 }
5544 codegen_->MaybeRecordImplicitNullCheck(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005545 break;
5546 }
5547
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005548 case Primitive::kPrimLong: {
5549 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005550 if (value.IsRegisterPair()) {
5551 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
5552 value.AsRegisterPairLow<Register>());
5553 codegen_->MaybeRecordImplicitNullCheck(instruction);
5554 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
5555 value.AsRegisterPairHigh<Register>());
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005556 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005557 DCHECK(value.IsConstant());
5558 int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
5559 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset),
5560 Immediate(Low32Bits(val)));
5561 codegen_->MaybeRecordImplicitNullCheck(instruction);
5562 __ movl(CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, data_offset + kX86WordSize),
5563 Immediate(High32Bits(val)));
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005564 }
5565 break;
5566 }
5567
Mark Mendell7c8d0092015-01-26 11:21:33 -05005568 case Primitive::kPrimFloat: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005569 uint32_t offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005570 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_4, offset);
Mark Mendell81489372015-11-04 11:30:41 -05005571 if (value.IsFpuRegister()) {
5572 __ movss(address, value.AsFpuRegister<XmmRegister>());
5573 } else {
5574 DCHECK(value.IsConstant());
5575 int32_t v = bit_cast<int32_t, float>(value.GetConstant()->AsFloatConstant()->GetValue());
5576 __ movl(address, Immediate(v));
5577 }
5578 codegen_->MaybeRecordImplicitNullCheck(instruction);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005579 break;
5580 }
5581
5582 case Primitive::kPrimDouble: {
Nicolas Geoffraye0395dd2015-09-25 11:04:45 +01005583 uint32_t offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005584 Address address = CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset);
Mark Mendell81489372015-11-04 11:30:41 -05005585 if (value.IsFpuRegister()) {
5586 __ movsd(address, value.AsFpuRegister<XmmRegister>());
5587 } else {
5588 DCHECK(value.IsConstant());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005589 Address address_hi =
5590 CodeGeneratorX86::ArrayAddress(array, index, TIMES_8, offset + kX86WordSize);
Mark Mendell81489372015-11-04 11:30:41 -05005591 int64_t v = bit_cast<int64_t, double>(value.GetConstant()->AsDoubleConstant()->GetValue());
5592 __ movl(address, Immediate(Low32Bits(v)));
5593 codegen_->MaybeRecordImplicitNullCheck(instruction);
5594 __ movl(address_hi, Immediate(High32Bits(v)));
5595 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05005596 break;
5597 }
5598
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005599 case Primitive::kPrimVoid:
5600 LOG(FATAL) << "Unreachable type " << instruction->GetType();
Ian Rogersfc787ec2014-10-09 21:56:44 -07005601 UNREACHABLE();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005602 }
5603}
5604
5605void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
5606 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Nicolas Geoffray8e3964b2014-10-17 11:06:38 +01005607 locations->SetInAt(0, Location::RequiresRegister());
Mark Mendellee8d9712016-07-12 11:13:15 -04005608 if (!instruction->IsEmittedAtUseSite()) {
5609 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
5610 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005611}
5612
5613void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
Mark Mendellee8d9712016-07-12 11:13:15 -04005614 if (instruction->IsEmittedAtUseSite()) {
5615 return;
5616 }
5617
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005618 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01005619 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Roland Levillain271ab9c2014-11-27 15:23:57 +00005620 Register obj = locations->InAt(0).AsRegister<Register>();
5621 Register out = locations->Out().AsRegister<Register>();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005622 __ movl(out, Address(obj, offset));
Calin Juravle77520bc2015-01-12 18:45:46 +00005623 codegen_->MaybeRecordImplicitNullCheck(instruction);
jessicahandojo4877b792016-09-08 19:49:13 -07005624 // Shift out the compression flag (bit 0) of the count in case the array is a String's char array.
5625 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005626 __ shrl(out, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005627 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005628}
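// Editorial note, added for clarity (not part of the original ART source): when
// string compression is enabled, String.count stores (length << 1) | flag, so
// String.length() is recovered with a single logical shift:
//   movl out, [obj + count_offset]   ; load the packed count
//   shrl out, 1                      ; drop the flag bit, leaving the character count
// Plain arrays store their length directly, so the shift is skipped for them.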
5629
5630void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01005631 RegisterSet caller_saves = RegisterSet::Empty();
5632 InvokeRuntimeCallingConvention calling_convention;
5633 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5634 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
5635 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Mark Mendellf60c90b2015-03-04 15:12:59 -05005636 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Mark Mendellee8d9712016-07-12 11:13:15 -04005637 HInstruction* length = instruction->InputAt(1);
5638 if (!length->IsEmittedAtUseSite()) {
5639 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
5640 }
jessicahandojo4877b792016-09-08 19:49:13 -07005641 // Need a temporary register to load the array's length.
5642 if (mirror::kUseStringCompression && instruction->IsStringCharAt()) {
5643 locations->AddTemp(Location::RequiresRegister());
5644 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005645}
5646
5647void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
jessicahandojo4877b792016-09-08 19:49:13 -07005648 const bool is_string_compressed_char_at =
5649 mirror::kUseStringCompression && instruction->IsStringCharAt();
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005650 LocationSummary* locations = instruction->GetLocations();
Mark Mendellf60c90b2015-03-04 15:12:59 -05005651 Location index_loc = locations->InAt(0);
5652 Location length_loc = locations->InAt(1);
Andreas Gampe85b62f22015-09-09 13:15:38 -07005653 SlowPathCode* slow_path =
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01005654 new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(instruction);
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005655
Mark Mendell99dbd682015-04-22 16:18:52 -04005656 if (length_loc.IsConstant()) {
5657 int32_t length = CodeGenerator::GetInt32ValueOf(length_loc.GetConstant());
5658 if (index_loc.IsConstant()) {
5659 // BCE will remove the bounds check if we are guaranteed to pass.
5660 int32_t index = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5661 if (index < 0 || index >= length) {
5662 codegen_->AddSlowPath(slow_path);
5663 __ jmp(slow_path->GetEntryLabel());
5664 } else {
5665 // Some optimization after BCE may have generated this, and we should not
5666 // generate a bounds check if it is a valid range.
5667 }
5668 return;
5669 }
5670
5671 // We have to reverse the jump condition because the length is the constant.
5672 Register index_reg = index_loc.AsRegister<Register>();
5673 __ cmpl(index_reg, Immediate(length));
5674 codegen_->AddSlowPath(slow_path);
5675 __ j(kAboveEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005676 } else {
Mark Mendellee8d9712016-07-12 11:13:15 -04005677 HInstruction* array_length = instruction->InputAt(1);
5678 if (array_length->IsEmittedAtUseSite()) {
5679 // Address the length field in the array.
5680 DCHECK(array_length->IsArrayLength());
5681 uint32_t len_offset = CodeGenerator::GetArrayLengthOffset(array_length->AsArrayLength());
5682 Location array_loc = array_length->GetLocations()->InAt(0);
5683 Address array_len(array_loc.AsRegister<Register>(), len_offset);
jessicahandojo4877b792016-09-08 19:49:13 -07005684 if (is_string_compressed_char_at) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005685 // TODO: if index_loc.IsConstant(), compare twice the index (to compensate for
5686 // the string compression flag) with the in-memory length and avoid the temporary.
jessicahandojo4877b792016-09-08 19:49:13 -07005687 Register length_reg = locations->GetTemp(0).AsRegister<Register>();
5688 __ movl(length_reg, array_len);
5689 codegen_->MaybeRecordImplicitNullCheck(array_length);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01005690 __ shrl(length_reg, Immediate(1));
jessicahandojo4877b792016-09-08 19:49:13 -07005691 codegen_->GenerateIntCompare(length_reg, index_loc);
Mark Mendellee8d9712016-07-12 11:13:15 -04005692 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07005693 // Bounds check for the general case:
5694 // an array of char, or a String's value array with compression turned off.
5695 if (index_loc.IsConstant()) {
5696 int32_t value = CodeGenerator::GetInt32ValueOf(index_loc.GetConstant());
5697 __ cmpl(array_len, Immediate(value));
5698 } else {
5699 __ cmpl(array_len, index_loc.AsRegister<Register>());
5700 }
5701 codegen_->MaybeRecordImplicitNullCheck(array_length);
Mark Mendellee8d9712016-07-12 11:13:15 -04005702 }
Mark Mendell99dbd682015-04-22 16:18:52 -04005703 } else {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01005704 codegen_->GenerateIntCompare(length_loc, index_loc);
Mark Mendell99dbd682015-04-22 16:18:52 -04005705 }
5706 codegen_->AddSlowPath(slow_path);
5707 __ j(kBelowEqual, slow_path->GetEntryLabel());
Mark Mendellf60c90b2015-03-04 15:12:59 -05005708 }
Nicolas Geoffray3c7bb982014-07-23 16:04:16 +01005709}
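// Editorial note, added for clarity (not part of the original ART source): the
// bounds check relies on an unsigned comparison so a single branch covers both
// "index < 0" and "index >= length". For the constant-length path this is roughly:
//   cmpl index, length
//   jae  <BoundsCheckSlowPathX86 entry>   ; unsigned >=, so a negative index, seen as
//                                         ; a huge unsigned value, also traps here
// The kBelowEqual form above is the same test with the operands reversed
// (length <= index, unsigned).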
5710
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01005711void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005712 LOG(FATAL) << "Unreachable";
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01005713}
5714
5715void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005716 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
5717}
5718
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005719void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01005720 LocationSummary* locations =
5721 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Aart Bikb13c65b2017-03-21 20:14:07 -07005722 // In suspend check slow path, usually there are no caller-save registers at all.
5723 // If SIMD instructions are present, however, we force spilling all live SIMD
5724 // registers in full width (since the runtime only saves/restores lower part).
Aart Bik5576f372017-03-23 16:17:37 -07005725 locations->SetCustomSlowPathCallerSaves(
5726 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005727}
5728
5729void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005730 HBasicBlock* block = instruction->GetBlock();
5731 if (block->GetLoopInformation() != nullptr) {
5732 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
5733 // The back edge will generate the suspend check.
5734 return;
5735 }
5736 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
5737 // The goto will generate the suspend check.
5738 return;
5739 }
5740 GenerateSuspendCheck(instruction, nullptr);
5741}
5742
5743void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
5744 HBasicBlock* successor) {
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005745 SuspendCheckSlowPathX86* slow_path =
Nicolas Geoffraydb216f42015-05-05 17:02:20 +01005746 down_cast<SuspendCheckSlowPathX86*>(instruction->GetSlowPath());
5747 if (slow_path == nullptr) {
5748 slow_path = new (GetGraph()->GetArena()) SuspendCheckSlowPathX86(instruction, successor);
5749 instruction->SetSlowPath(slow_path);
5750 codegen_->AddSlowPath(slow_path);
5751 if (successor != nullptr) {
5752 DCHECK(successor->IsLoopHeader());
5753 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(instruction);
5754 }
5755 } else {
5756 DCHECK_EQ(slow_path->GetSuccessor(), successor);
5757 }
5758
Andreas Gampe542451c2016-07-26 09:02:02 -07005759 __ fs()->cmpw(Address::Absolute(Thread::ThreadFlagsOffset<kX86PointerSize>().Int32Value()),
Roland Levillain7c1559a2015-12-15 10:55:36 +00005760 Immediate(0));
Nicolas Geoffray3c049742014-09-24 18:10:46 +01005761 if (successor == nullptr) {
5762 __ j(kNotEqual, slow_path->GetEntryLabel());
5763 __ Bind(slow_path->GetReturnLabel());
5764 } else {
5765 __ j(kEqual, codegen_->GetLabelOf(successor));
5766 __ jmp(slow_path->GetEntryLabel());
5767 }
Nicolas Geoffrayfbc695f2014-09-15 15:33:30 +00005768}
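// Editorial sketch, added for clarity (not part of the original ART source): on x86
// the thread flags are reachable at a fixed offset from the FS segment base, so a
// suspend check without a successor block compiles to roughly:
//   cmpw fs:[THREAD_FLAGS_OFFSET], 0
//   jne  <SuspendCheckSlowPathX86 entry>
//  return_label:
// With a successor block the polarity is inverted: je <successor>; jmp <slow path>.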
5769
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005770X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
5771 return codegen_->GetAssembler();
5772}
5773
Mark Mendell7c8d0092015-01-26 11:21:33 -05005774void ParallelMoveResolverX86::MoveMemoryToMemory32(int dst, int src) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005775 ScratchRegisterScope ensure_scratch(
5776 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
5777 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
5778 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
5779 __ movl(temp_reg, Address(ESP, src + stack_offset));
5780 __ movl(Address(ESP, dst + stack_offset), temp_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005781}
5782
5783void ParallelMoveResolverX86::MoveMemoryToMemory64(int dst, int src) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005784 ScratchRegisterScope ensure_scratch(
5785 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
5786 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
5787 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
5788 __ movl(temp_reg, Address(ESP, src + stack_offset));
5789 __ movl(Address(ESP, dst + stack_offset), temp_reg);
5790 __ movl(temp_reg, Address(ESP, src + stack_offset + kX86WordSize));
5791 __ movl(Address(ESP, dst + stack_offset + kX86WordSize), temp_reg);
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005792}
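// Editorial note, added for clarity (not part of the original ART source): if the
// scratch scope had to spill a register to free one up, that push moved ESP down by
// one word, so the ESP-relative source/destination offsets are biased by
// kX86WordSize. For the 32-bit case this is roughly:
//   pushl EAX                     ; only when no core register was free
//   movl  EAX, [ESP + src + 4]    ; +4 compensates for the push
//   movl  [ESP + dst + 4], EAX
//   popl  EAX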
5793
5794void ParallelMoveResolverX86::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005795 MoveOperands* move = moves_[index];
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005796 Location source = move->GetSource();
5797 Location destination = move->GetDestination();
5798
5799 if (source.IsRegister()) {
5800 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005801 __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
David Brazdil74eb1b22015-12-14 11:44:01 +00005802 } else if (destination.IsFpuRegister()) {
5803 __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005804 } else {
5805 DCHECK(destination.IsStackSlot());
Roland Levillain271ab9c2014-11-27 15:23:57 +00005806 __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005807 }
David Brazdil74eb1b22015-12-14 11:44:01 +00005808 } else if (source.IsRegisterPair()) {
5809 size_t elem_size = Primitive::ComponentSize(Primitive::kPrimInt);
5810 // Create stack space for 2 elements.
5811 __ subl(ESP, Immediate(2 * elem_size));
5812 __ movl(Address(ESP, 0), source.AsRegisterPairLow<Register>());
5813 __ movl(Address(ESP, elem_size), source.AsRegisterPairHigh<Register>());
5814 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
5815 // And remove the temporary stack space we allocated.
5816 __ addl(ESP, Immediate(2 * elem_size));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005817 } else if (source.IsFpuRegister()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00005818 if (destination.IsRegister()) {
5819 __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
5820 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05005821 __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
David Brazdil74eb1b22015-12-14 11:44:01 +00005822 } else if (destination.IsRegisterPair()) {
5823 XmmRegister src_reg = source.AsFpuRegister<XmmRegister>();
5824 __ movd(destination.AsRegisterPairLow<Register>(), src_reg);
5825 __ psrlq(src_reg, Immediate(32));
5826 __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005827 } else if (destination.IsStackSlot()) {
5828 __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07005829 } else if (destination.IsDoubleStackSlot()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05005830 __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Aart Bik5576f372017-03-23 16:17:37 -07005831 } else {
5832 DCHECK(destination.IsSIMDStackSlot());
5833 __ movups(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
Mark Mendell7c8d0092015-01-26 11:21:33 -05005834 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005835 } else if (source.IsStackSlot()) {
5836 if (destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005837 __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005838 } else if (destination.IsFpuRegister()) {
5839 __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005840 } else {
5841 DCHECK(destination.IsStackSlot());
Mark Mendell7c8d0092015-01-26 11:21:33 -05005842 MoveMemoryToMemory32(destination.GetStackIndex(), source.GetStackIndex());
5843 }
5844 } else if (source.IsDoubleStackSlot()) {
David Brazdil74eb1b22015-12-14 11:44:01 +00005845 if (destination.IsRegisterPair()) {
5846 __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
5847 __ movl(destination.AsRegisterPairHigh<Register>(),
5848 Address(ESP, source.GetHighStackIndex(kX86WordSize)));
5849 } else if (destination.IsFpuRegister()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05005850 __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
5851 } else {
5852 DCHECK(destination.IsDoubleStackSlot()) << destination;
5853 MoveMemoryToMemory64(destination.GetStackIndex(), source.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005854 }
Aart Bik5576f372017-03-23 16:17:37 -07005855 } else if (source.IsSIMDStackSlot()) {
5856 DCHECK(destination.IsFpuRegister());
5857 __ movups(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005858 } else if (source.IsConstant()) {
Mark Mendell7c8d0092015-01-26 11:21:33 -05005859 HConstant* constant = source.GetConstant();
Nicolas Geoffrayd6138ef2015-02-18 14:48:53 +00005860 if (constant->IsIntConstant() || constant->IsNullConstant()) {
Mark Mendell09b84632015-02-13 17:48:38 -05005861 int32_t value = CodeGenerator::GetInt32ValueOf(constant);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005862 if (destination.IsRegister()) {
Mark Mendell09b84632015-02-13 17:48:38 -05005863 if (value == 0) {
5864 __ xorl(destination.AsRegister<Register>(), destination.AsRegister<Register>());
5865 } else {
5866 __ movl(destination.AsRegister<Register>(), Immediate(value));
5867 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05005868 } else {
5869 DCHECK(destination.IsStackSlot()) << destination;
Mark Mendell09b84632015-02-13 17:48:38 -05005870 __ movl(Address(ESP, destination.GetStackIndex()), Immediate(value));
Mark Mendell7c8d0092015-01-26 11:21:33 -05005871 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005872 } else if (constant->IsFloatConstant()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005873 float fp_value = constant->AsFloatConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005874 int32_t value = bit_cast<int32_t, float>(fp_value);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005875 Immediate imm(value);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005876 if (destination.IsFpuRegister()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005877 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
5878 if (value == 0) {
5879 // Easy handling of 0.0.
5880 __ xorps(dest, dest);
5881 } else {
5882 ScratchRegisterScope ensure_scratch(
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005883 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
5884 Register temp = static_cast<Register>(ensure_scratch.GetRegister());
5885 __ movl(temp, Immediate(value));
5886 __ movd(dest, temp);
Mark Mendell3f6c7f62015-03-13 13:47:53 -04005887 }
Mark Mendell7c8d0092015-01-26 11:21:33 -05005888 } else {
5889 DCHECK(destination.IsStackSlot()) << destination;
5890 __ movl(Address(ESP, destination.GetStackIndex()), imm);
5891 }
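    // Editorial note, added for clarity (not part of the original ART source): +0.0f has
    // an all-zero bit pattern, so it is materialized with `xorps dest, dest` instead of
    // routing a 32-bit immediate through a scratch GPR; every other float constant
    // (including -0.0f, whose bit pattern is 0x80000000) takes the movl/movd path above.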
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005892 } else if (constant->IsLongConstant()) {
5893 int64_t value = constant->AsLongConstant()->GetValue();
5894 int32_t low_value = Low32Bits(value);
5895 int32_t high_value = High32Bits(value);
5896 Immediate low(low_value);
5897 Immediate high(high_value);
5898 if (destination.IsDoubleStackSlot()) {
5899 __ movl(Address(ESP, destination.GetStackIndex()), low);
5900 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
5901 } else {
5902 __ movl(destination.AsRegisterPairLow<Register>(), low);
5903 __ movl(destination.AsRegisterPairHigh<Register>(), high);
5904 }
5905 } else {
5906 DCHECK(constant->IsDoubleConstant());
5907 double dbl_value = constant->AsDoubleConstant()->GetValue();
Roland Levillainda4d79b2015-03-24 14:36:11 +00005908 int64_t value = bit_cast<int64_t, double>(dbl_value);
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00005909 int32_t low_value = Low32Bits(value);
5910 int32_t high_value = High32Bits(value);
5911 Immediate low(low_value);
5912 Immediate high(high_value);
5913 if (destination.IsFpuRegister()) {
5914 XmmRegister dest = destination.AsFpuRegister<XmmRegister>();
5915 if (value == 0) {
5916 // Easy handling of 0.0.
5917 __ xorpd(dest, dest);
5918 } else {
5919 __ pushl(high);
5920 __ pushl(low);
5921 __ movsd(dest, Address(ESP, 0));
5922 __ addl(ESP, Immediate(8));
5923 }
5924 } else {
5925 DCHECK(destination.IsDoubleStackSlot()) << destination;
5926 __ movl(Address(ESP, destination.GetStackIndex()), low);
5927 __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), high);
5928 }
Nicolas Geoffray96f89a22014-07-11 10:57:49 +01005929 }
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005930 } else {
Nicolas Geoffray42d1f5f2015-01-16 09:14:18 +00005931 LOG(FATAL) << "Unimplemented move: " << destination << " <- " << source;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005932 }
5933}
5934
Mark Mendella5c19ce2015-04-01 12:51:05 -04005935void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005936 Register suggested_scratch = reg == EAX ? EBX : EAX;
5937 ScratchRegisterScope ensure_scratch(
5938 this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
5939
5940 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
5941 __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
5942 __ movl(Address(ESP, mem + stack_offset), reg);
5943 __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005944}
5945
Mark Mendell7c8d0092015-01-26 11:21:33 -05005946void ParallelMoveResolverX86::Exchange32(XmmRegister reg, int mem) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005947 ScratchRegisterScope ensure_scratch(
5948 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
5949
5950 Register temp_reg = static_cast<Register>(ensure_scratch.GetRegister());
5951 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
5952 __ movl(temp_reg, Address(ESP, mem + stack_offset));
5953 __ movss(Address(ESP, mem + stack_offset), reg);
5954 __ movd(reg, temp_reg);
Mark Mendell7c8d0092015-01-26 11:21:33 -05005955}
5956
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005957void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005958 ScratchRegisterScope ensure_scratch1(
5959 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01005960
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005961 Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
5962 ScratchRegisterScope ensure_scratch2(
5963 this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());
Nicolas Geoffraye27f31a2014-06-12 17:53:14 +01005964
Guillaume Sancheze14590b2015-04-15 18:57:27 +00005965 int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
5966 stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
5967 __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
5968 __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
5969 __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
5970 __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005971}
5972
5973void ParallelMoveResolverX86::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01005974 MoveOperands* move = moves_[index];
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005975 Location source = move->GetSource();
5976 Location destination = move->GetDestination();
5977
5978 if (source.IsRegister() && destination.IsRegister()) {
Mark Mendell90979812015-07-28 16:41:21 -04005979 // Use the XOR swap algorithm to avoid a serializing XCHG instruction or a temporary register.
5980 DCHECK_NE(destination.AsRegister<Register>(), source.AsRegister<Register>());
5981 __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
5982 __ xorl(source.AsRegister<Register>(), destination.AsRegister<Register>());
5983 __ xorl(destination.AsRegister<Register>(), source.AsRegister<Register>());
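    // Editorial note, added for clarity (not part of the original ART source): the three
    // XORs swap the registers in place:
    //   a ^= b;   // a == a0 ^ b0
    //   b ^= a;   // b == b0 ^ (a0 ^ b0) == a0
    //   a ^= b;   // a == (a0 ^ b0) ^ a0 == b0
    // This is also why the DCHECK above requires distinct registers: XOR-swapping a
    // register with itself would clear it.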
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005984 } else if (source.IsRegister() && destination.IsStackSlot()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005985 Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005986 } else if (source.IsStackSlot() && destination.IsRegister()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00005987 Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01005988 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
5989 Exchange(destination.GetStackIndex(), source.GetStackIndex());
Mark Mendell7c8d0092015-01-26 11:21:33 -05005990 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) {
5991 // Use XOR Swap algorithm to avoid a temporary.
5992 DCHECK_NE(source.reg(), destination.reg());
5993 __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
5994 __ xorpd(source.AsFpuRegister<XmmRegister>(), destination.AsFpuRegister<XmmRegister>());
5995 __ xorpd(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
5996 } else if (source.IsFpuRegister() && destination.IsStackSlot()) {
5997 Exchange32(source.AsFpuRegister<XmmRegister>(), destination.GetStackIndex());
5998 } else if (destination.IsFpuRegister() && source.IsStackSlot()) {
5999 Exchange32(destination.AsFpuRegister<XmmRegister>(), source.GetStackIndex());
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00006000 } else if (source.IsFpuRegister() && destination.IsDoubleStackSlot()) {
6001 // Take advantage of the 16 bytes in the XMM register.
6002 XmmRegister reg = source.AsFpuRegister<XmmRegister>();
6003 Address stack(ESP, destination.GetStackIndex());
6004 // Load the double into the high doubleword.
6005 __ movhpd(reg, stack);
6006
6007 // Store the low double into the destination.
6008 __ movsd(stack, reg);
6009
6010 // Move the high double to the low double.
6011 __ psrldq(reg, Immediate(8));
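    // Editorial sketch, added for clarity (not part of the original ART source): the
    // swap uses the upper half of the 128-bit XMM register as scratch space:
    //   movhpd xmm, [ESP + slot]   ; memory double -> upper 64 bits, lower 64 untouched
    //   movsd  [ESP + slot], xmm   ; original double (lower 64 bits) -> memory
    //   psrldq xmm, 8              ; shift the loaded double down into the lower 64 bits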
6012 } else if (destination.IsFpuRegister() && source.IsDoubleStackSlot()) {
6013 // Take advantage of the 16 bytes in the XMM register.
6014 XmmRegister reg = destination.AsFpuRegister<XmmRegister>();
6015 Address stack(ESP, source.GetStackIndex());
6016 // Load the double into the high doubleword.
6017 __ movhpd(reg, stack);
6018
6019 // Store the low double into the destination.
6020 __ movsd(stack, reg);
6021
6022 // Move the high double to the low double.
6023 __ psrldq(reg, Immediate(8));
6024 } else if (destination.IsDoubleStackSlot() && source.IsDoubleStackSlot()) {
6025 Exchange(destination.GetStackIndex(), source.GetStackIndex());
6026 Exchange(destination.GetHighStackIndex(kX86WordSize), source.GetHighStackIndex(kX86WordSize));
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006027 } else {
Mark Mendell7c8d0092015-01-26 11:21:33 -05006028 LOG(FATAL) << "Unimplemented: source: " << source << ", destination: " << destination;
Nicolas Geoffray86dbb9a2014-06-04 11:12:39 +01006029 }
6030}
6031
6032void ParallelMoveResolverX86::SpillScratch(int reg) {
6033 __ pushl(static_cast<Register>(reg));
6034}
6035
6036void ParallelMoveResolverX86::RestoreScratch(int reg) {
6037 __ popl(static_cast<Register>(reg));
Nicolas Geoffray4e3d23a2014-05-22 18:32:45 +01006038}
6039
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006040HLoadClass::LoadKind CodeGeneratorX86::GetSupportedLoadClassKind(
6041 HLoadClass::LoadKind desired_class_load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006042 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006043 case HLoadClass::LoadKind::kInvalid:
6044 LOG(FATAL) << "UNREACHABLE";
6045 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006046 case HLoadClass::LoadKind::kReferrersClass:
6047 break;
6048 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
6049 DCHECK(!GetCompilerOptions().GetCompilePic());
6050 break;
6051 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
6052 DCHECK(GetCompilerOptions().GetCompilePic());
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006053 FALLTHROUGH_INTENDED;
6054 case HLoadClass::LoadKind::kBssEntry:
6055 DCHECK(!Runtime::Current()->UseJitCompilation()); // Note: boot image is also non-JIT.
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006056 break;
6057 case HLoadClass::LoadKind::kBootImageAddress:
6058 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006059 case HLoadClass::LoadKind::kJitTableAddress:
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006060 DCHECK(Runtime::Current()->UseJitCompilation());
6061 break;
6062 case HLoadClass::LoadKind::kDexCacheViaMethod:
6063 break;
6064 }
6065 return desired_class_load_kind;
6066}
6067
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006068void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00006069 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
6070 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006071 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko41559982017-01-06 14:04:23 +00006072 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006073 cls,
6074 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko41559982017-01-06 14:04:23 +00006075 Location::RegisterLocation(EAX));
Vladimir Markoea4c1262017-02-06 19:59:33 +00006076 DCHECK_EQ(calling_convention.GetRegisterAt(0), EAX);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006077 return;
6078 }
Vladimir Marko41559982017-01-06 14:04:23 +00006079 DCHECK(!cls->NeedsAccessCheck());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006080
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006081 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
6082 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006083 ? LocationSummary::kCallOnSlowPath
6084 : LocationSummary::kNoCall;
6085 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006086 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006087 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006088 }
6089
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006090 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006091 load_kind == HLoadClass::LoadKind::kBootImageLinkTimePcRelative ||
6092 load_kind == HLoadClass::LoadKind::kBssEntry) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006093 locations->SetInAt(0, Location::RequiresRegister());
6094 }
6095 locations->SetOut(Location::RequiresRegister());
Vladimir Markoea4c1262017-02-06 19:59:33 +00006096 if (load_kind == HLoadClass::LoadKind::kBssEntry) {
6097 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6098 // Rely on the type resolution and/or initialization to save everything.
6099 RegisterSet caller_saves = RegisterSet::Empty();
6100 InvokeRuntimeCallingConvention calling_convention;
6101 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6102 locations->SetCustomSlowPathCallerSaves(caller_saves);
6103 } else {
6104 // For non-Baker read barrier we have a temp-clobbering call.
6105 }
6106 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006107}
6108
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006109Label* CodeGeneratorX86::NewJitRootClassPatch(const DexFile& dex_file,
6110 dex::TypeIndex dex_index,
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006111 Handle<mirror::Class> handle) {
6112 jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index),
6113 reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006114 // Add a patch entry and return the label.
6115 jit_class_patches_.emplace_back(dex_file, dex_index.index_);
6116 PatchInfo<Label>* info = &jit_class_patches_.back();
6117 return &info->label;
6118}
6119
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006120// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6121// move.
6122void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00006123 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
6124 if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
6125 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01006126 return;
6127 }
Vladimir Marko41559982017-01-06 14:04:23 +00006128 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01006129
Vladimir Marko41559982017-01-06 14:04:23 +00006130 LocationSummary* locations = cls->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006131 Location out_loc = locations->Out();
6132 Register out = out_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006133
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006134 bool generate_null_check = false;
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006135 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
6136 ? kWithoutReadBarrier
6137 : kCompilerReadBarrierOption;
Vladimir Marko41559982017-01-06 14:04:23 +00006138 switch (load_kind) {
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006139 case HLoadClass::LoadKind::kReferrersClass: {
6140 DCHECK(!cls->CanCallRuntime());
6141 DCHECK(!cls->MustGenerateClinitCheck());
6142 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
6143 Register current_method = locations->InAt(0).AsRegister<Register>();
6144 GenerateGcRootFieldLoad(
Mathieu Chartier31b12e32016-09-02 17:11:57 -07006145 cls,
6146 out_loc,
6147 Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()),
Roland Levillain00468f32016-10-27 18:02:48 +01006148 /* fixup_label */ nullptr,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006149 read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006150 break;
6151 }
6152 case HLoadClass::LoadKind::kBootImageLinkTimeAddress: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006153 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006154 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006155 __ movl(out, Immediate(/* placeholder */ 0));
Vladimir Marko1998cd02017-01-13 13:02:58 +00006156 codegen_->RecordBootTypePatch(cls);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006157 break;
6158 }
6159 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006160 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006161 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006162 Register method_address = locations->InAt(0).AsRegister<Register>();
6163 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Marko1998cd02017-01-13 13:02:58 +00006164 codegen_->RecordBootTypePatch(cls);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006165 break;
6166 }
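    // Editorial note, added for clarity (not part of the original ART source): 32-bit
    // x86 has no PC-relative addressing mode, so PC-relative load kinds take an extra
    // input holding a base address materialized earlier in the method (the
    // HX86ComputeBaseMethodAddress instruction). The LEA above is emitted with a dummy
    // 32-bit displacement that is later patched to (target - base), roughly:
    //   leal out, [method_address + kDummy32BitOffset]   ; displacement fixed up when
    //                                                    ; the boot image patch is applied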
6167 case HLoadClass::LoadKind::kBootImageAddress: {
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006168 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006169 uint32_t address = dchecked_integral_cast<uint32_t>(
6170 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
6171 DCHECK_NE(address, 0u);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006172 __ movl(out, Immediate(address));
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006173 break;
6174 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006175 case HLoadClass::LoadKind::kBssEntry: {
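      // The class's .bss slot starts out null, so generate_null_check is set below and the
      // code at the end of this method branches to the slow path on a null load, which
      // resolves the class at runtime.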
6176 Register method_address = locations->InAt(0).AsRegister<Register>();
6177 Address address(method_address, CodeGeneratorX86::kDummy32BitOffset);
6178 Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
6179 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
6180 generate_null_check = true;
6181 break;
6182 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00006183 case HLoadClass::LoadKind::kJitTableAddress: {
6184 Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
6185 Label* fixup_label = codegen_->NewJitRootClassPatch(
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006186 cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006187 // /* GcRoot<mirror::Class> */ out = *address
Vladimir Markoea4c1262017-02-06 19:59:33 +00006188 GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006189 break;
6190 }
Vladimir Marko41559982017-01-06 14:04:23 +00006191 case HLoadClass::LoadKind::kDexCacheViaMethod:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006192 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00006193 LOG(FATAL) << "UNREACHABLE";
6194 UNREACHABLE();
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006195 }
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006196
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006197 if (generate_null_check || cls->MustGenerateClinitCheck()) {
6198 DCHECK(cls->CanCallRuntime());
6199 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
6200 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
6201 codegen_->AddSlowPath(slow_path);
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00006202
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006203 if (generate_null_check) {
6204 __ testl(out, out);
6205 __ j(kEqual, slow_path->GetEntryLabel());
6206 }
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00006207
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01006208 if (cls->MustGenerateClinitCheck()) {
6209 GenerateClassInitializationCheck(slow_path, out);
6210 } else {
6211 __ Bind(slow_path->GetExitLabel());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006212 }
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006213 }
6214}
6215
6216void LocationsBuilderX86::VisitClinitCheck(HClinitCheck* check) {
6217 LocationSummary* locations =
6218 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
6219 locations->SetInAt(0, Location::RequiresRegister());
6220 if (check->HasUses()) {
6221 locations->SetOut(Location::SameAsFirstInput());
6222 }
6223}
6224
6225void InstructionCodeGeneratorX86::VisitClinitCheck(HClinitCheck* check) {
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006226  // We assume the class is not null.
Andreas Gampe85b62f22015-09-09 13:15:38 -07006227 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006228 check->GetLoadClass(), check, check->GetDexPc(), true);
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006229 codegen_->AddSlowPath(slow_path);
Roland Levillain199f3362014-11-27 17:15:16 +00006230 GenerateClassInitializationCheck(slow_path,
6231 check->GetLocations()->InAt(0).AsRegister<Register>());
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006232}
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006233
Nicolas Geoffray424f6762014-11-03 14:51:25 +00006234void InstructionCodeGeneratorX86::GenerateClassInitializationCheck(
Andreas Gampe85b62f22015-09-09 13:15:38 -07006235 SlowPathCode* slow_path, Register class_reg) {
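  // Roughly, in pseudocode: if (class_reg->status_ < kStatusInitialized) { call slow path },
  // where the slow path initializes the class and resumes at the exit label bound below.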
Nicolas Geoffray19a19cf2014-10-22 16:07:05 +01006236 __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
6237 Immediate(mirror::Class::kStatusInitialized));
6238 __ j(kLess, slow_path->GetEntryLabel());
6239 __ Bind(slow_path->GetExitLabel());
6240 // No need for memory fence, thanks to the X86 memory model.
6241}
6242
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006243HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind(
6244 HLoadString::LoadKind desired_string_load_kind) {
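  // Every requested load kind is supported on x86; the switch below only DCHECKs the
  // preconditions of each kind and returns the desired kind unchanged.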
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006245 switch (desired_string_load_kind) {
6246 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
6247 DCHECK(!GetCompilerOptions().GetCompilePic());
6248 break;
6249 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
6250 DCHECK(GetCompilerOptions().GetCompilePic());
6251 FALLTHROUGH_INTENDED;
Vladimir Markoaad75c62016-10-03 08:46:48 +00006252 case HLoadString::LoadKind::kBssEntry:
Calin Juravleffc87072016-04-20 14:22:09 +01006253 DCHECK(!Runtime::Current()->UseJitCompilation()); // Note: boot image is also non-JIT.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006254 break;
6255 case HLoadString::LoadKind::kBootImageAddress:
6256 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006257 case HLoadString::LoadKind::kJitTableAddress:
6258 DCHECK(Runtime::Current()->UseJitCompilation());
6259 break;
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006260 case HLoadString::LoadKind::kDexCacheViaMethod:
6261 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006262 }
6263 return desired_string_load_kind;
6264}
6265
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006266void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006267 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00006268 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006269 HLoadString::LoadKind load_kind = load->GetLoadKind();
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006270 if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
Vladimir Markoaad75c62016-10-03 08:46:48 +00006271 load_kind == HLoadString::LoadKind::kBssEntry) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006272 locations->SetInAt(0, Location::RequiresRegister());
6273 }
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006274 if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
6275 locations->SetOut(Location::RegisterLocation(EAX));
6276 } else {
6277 locations->SetOut(Location::RequiresRegister());
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006278 if (load_kind == HLoadString::LoadKind::kBssEntry) {
6279 if (!kUseReadBarrier || kUseBakerReadBarrier) {
Vladimir Markoea4c1262017-02-06 19:59:33 +00006280        // Rely on the pResolveString entrypoint to save everything.
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006281 RegisterSet caller_saves = RegisterSet::Empty();
6282 InvokeRuntimeCallingConvention calling_convention;
6283 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6284 locations->SetCustomSlowPathCallerSaves(caller_saves);
6285 } else {
6286 // For non-Baker read barrier we have a temp-clobbering call.
6287 }
6288 }
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006289 }
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006290}
6291
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006292Label* CodeGeneratorX86::NewJitRootStringPatch(const DexFile& dex_file,
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006293 dex::StringIndex dex_index,
6294 Handle<mirror::String> handle) {
6295 jit_string_roots_.Overwrite(
6296 StringReference(&dex_file, dex_index), reinterpret_cast64<uint64_t>(handle.GetReference()));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006297 // Add a patch entry and return the label.
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006298 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006299 PatchInfo<Label>* info = &jit_string_patches_.back();
6300 return &info->label;
6301}
6302
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006303// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6304// move.
6305void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Nicolas Geoffrayfbdaa302015-05-29 12:06:56 +01006306 LocationSummary* locations = load->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006307 Location out_loc = locations->Out();
6308 Register out = out_loc.AsRegister<Register>();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006309
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006310 switch (load->GetLoadKind()) {
6311 case HLoadString::LoadKind::kBootImageLinkTimeAddress: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006312 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006313 __ movl(out, Immediate(/* placeholder */ 0));
Vladimir Markoaad75c62016-10-03 08:46:48 +00006314 codegen_->RecordBootStringPatch(load);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006315 return; // No dex cache slow path.
6316 }
6317 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006318 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006319 Register method_address = locations->InAt(0).AsRegister<Register>();
6320 __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
Vladimir Markoaad75c62016-10-03 08:46:48 +00006321 codegen_->RecordBootStringPatch(load);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006322 return; // No dex cache slow path.
6323 }
6324 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006325 uint32_t address = dchecked_integral_cast<uint32_t>(
6326 reinterpret_cast<uintptr_t>(load->GetString().Get()));
6327 DCHECK_NE(address, 0u);
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006328 __ movl(out, Immediate(address));
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006329 return; // No dex cache slow path.
6330 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00006331 case HLoadString::LoadKind::kBssEntry: {
6332 Register method_address = locations->InAt(0).AsRegister<Register>();
6333 Address address = Address(method_address, CodeGeneratorX86::kDummy32BitOffset);
6334 Label* fixup_label = codegen_->NewStringBssEntryPatch(load);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006335 // /* GcRoot<mirror::String> */ out = *address /* PC-relative */
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006336 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
Vladimir Markoaad75c62016-10-03 08:46:48 +00006337 SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load);
6338 codegen_->AddSlowPath(slow_path);
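      // A null root means the .bss entry has not been filled in yet; the slow path calls the
      // runtime to resolve the string.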
6339 __ testl(out, out);
6340 __ j(kEqual, slow_path->GetEntryLabel());
6341 __ Bind(slow_path->GetExitLabel());
6342 return;
6343 }
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006344 case HLoadString::LoadKind::kJitTableAddress: {
6345 Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
6346 Label* fixup_label = codegen_->NewJitRootStringPatch(
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006347 load->GetDexFile(), load->GetStringIndex(), load->GetString());
Nicolas Geoffray132d8362016-11-16 09:19:42 +00006348 // /* GcRoot<mirror::String> */ out = *address
6349 GenerateGcRootFieldLoad(load, out_loc, address, fixup_label, kCompilerReadBarrierOption);
6350 return;
6351 }
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006352 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006353 break;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00006354 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006355
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006356  // TODO: Re-add the compiler code that does the string dex cache lookup.
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006357 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko94ce9c22016-09-30 14:50:51 +01006358 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08006359 __ movl(calling_convention.GetRegisterAt(0), Immediate(load->GetStringIndex().index_));
Christina Wadsworth175d09b2016-08-31 16:26:01 -07006360 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6361 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Nicolas Geoffrayb5f62b32014-10-30 10:58:41 +00006362}
6363
David Brazdilcb1c0552015-08-04 16:22:25 +01006364static Address GetExceptionTlsAddress() {
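  // The pending exception is stored in the Thread object; on x86 ART reaches thread-local
  // state through the fs segment, hence the fs()-> accesses in the visitors below.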
Andreas Gampe542451c2016-07-26 09:02:02 -07006365 return Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>().Int32Value());
David Brazdilcb1c0552015-08-04 16:22:25 +01006366}
6367
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006368void LocationsBuilderX86::VisitLoadException(HLoadException* load) {
6369 LocationSummary* locations =
6370 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
6371 locations->SetOut(Location::RequiresRegister());
6372}
6373
6374void InstructionCodeGeneratorX86::VisitLoadException(HLoadException* load) {
David Brazdilcb1c0552015-08-04 16:22:25 +01006375 __ fs()->movl(load->GetLocations()->Out().AsRegister<Register>(), GetExceptionTlsAddress());
6376}
6377
6378void LocationsBuilderX86::VisitClearException(HClearException* clear) {
6379 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
6380}
6381
6382void InstructionCodeGeneratorX86::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
6383 __ fs()->movl(GetExceptionTlsAddress(), Immediate(0));
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006384}
6385
6386void LocationsBuilderX86::VisitThrow(HThrow* instruction) {
6387 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006388 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006389 InvokeRuntimeCallingConvention calling_convention;
6390 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6391}
6392
6393void InstructionCodeGeneratorX86::VisitThrow(HThrow* instruction) {
Serban Constantinescuba45db02016-07-12 22:53:02 +01006394 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006395 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
Nicolas Geoffrayde58ab22014-11-05 12:46:03 +00006396}
6397
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006398// Temp is used for read barrier.
6399static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
6400 if (kEmitCompilerReadBarrier &&
Vladimir Marko953437b2016-08-24 08:30:46 +00006401 !kUseBakerReadBarrier &&
6402 (type_check_kind == TypeCheckKind::kAbstractClassCheck ||
Roland Levillain7c1559a2015-12-15 10:55:36 +00006403 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006404 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
6405 return 1;
6406 }
6407 return 0;
6408}
6409
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006410// Interface case has 2 temps, one for holding the number of interfaces and one for the current
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006411// interface pointer; the current interface is compared against memory directly, so no extra temp.
6412// The other checks have one temp for loading the object's class.
6413static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
6414 if (type_check_kind == TypeCheckKind::kInterfaceCheck && !kPoisonHeapReferences) {
6415 return 2;
6416 }
6417 return 1 + NumberOfInstanceOfTemps(type_check_kind);
Roland Levillain7c1559a2015-12-15 10:55:36 +00006418}
6419
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006420void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006421 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006422 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Vladimir Marko70e97462016-08-09 11:04:26 +01006423 bool baker_read_barrier_slow_path = false;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006424 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006425 case TypeCheckKind::kExactCheck:
6426 case TypeCheckKind::kAbstractClassCheck:
6427 case TypeCheckKind::kClassHierarchyCheck:
6428 case TypeCheckKind::kArrayObjectCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006429 call_kind =
6430 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Vladimir Marko70e97462016-08-09 11:04:26 +01006431 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006432 break;
6433 case TypeCheckKind::kArrayCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006434 case TypeCheckKind::kUnresolvedCheck:
6435 case TypeCheckKind::kInterfaceCheck:
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006436 call_kind = LocationSummary::kCallOnSlowPath;
6437 break;
6438 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006439
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006440 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Vladimir Marko70e97462016-08-09 11:04:26 +01006441 if (baker_read_barrier_slow_path) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006442 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01006443 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006444 locations->SetInAt(0, Location::RequiresRegister());
6445 locations->SetInAt(1, Location::Any());
6446 // Note that TypeCheckSlowPathX86 uses this "out" register too.
6447 locations->SetOut(Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006448 // When read barriers are enabled, we need a temporary register for some cases.
6449 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006450}
6451
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006452void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00006453 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006454 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006455 Location obj_loc = locations->InAt(0);
6456 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006457 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006458 Location out_loc = locations->Out();
6459 Register out = out_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006460 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
6461 DCHECK_LE(num_temps, 1u);
6462 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006463 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006464 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6465 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6466 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Andreas Gampe85b62f22015-09-09 13:15:38 -07006467 SlowPathCode* slow_path = nullptr;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006468 NearLabel done, zero;
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006469
6470 // Return 0 if `obj` is null.
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006471 // Avoid null check if we know obj is not null.
6472 if (instruction->MustDoNullCheck()) {
6473 __ testl(obj, obj);
6474 __ j(kEqual, &zero);
6475 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006476
Roland Levillain7c1559a2015-12-15 10:55:36 +00006477 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006478 case TypeCheckKind::kExactCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006479 // /* HeapReference<Class> */ out = obj->klass_
6480 GenerateReferenceLoadTwoRegisters(instruction,
6481 out_loc,
6482 obj_loc,
6483 class_offset,
6484 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006485 if (cls.IsRegister()) {
6486 __ cmpl(out, cls.AsRegister<Register>());
6487 } else {
6488 DCHECK(cls.IsStackSlot()) << cls;
6489 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6490 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006491
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006492 // Classes must be equal for the instanceof to succeed.
6493 __ j(kNotEqual, &zero);
6494 __ movl(out, Immediate(1));
6495 __ jmp(&done);
6496 break;
6497 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006498
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006499 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006500 // /* HeapReference<Class> */ out = obj->klass_
6501 GenerateReferenceLoadTwoRegisters(instruction,
6502 out_loc,
6503 obj_loc,
6504 class_offset,
6505 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006506 // If the class is abstract, we eagerly fetch the super class of the
6507 // object to avoid doing a comparison we know will fail.
6508 NearLabel loop;
6509 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006510 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006511 GenerateReferenceLoadOneRegister(instruction,
6512 out_loc,
6513 super_offset,
6514 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006515 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006516 __ testl(out, out);
6517 // If `out` is null, we use it for the result, and jump to `done`.
6518 __ j(kEqual, &done);
6519 if (cls.IsRegister()) {
6520 __ cmpl(out, cls.AsRegister<Register>());
6521 } else {
6522 DCHECK(cls.IsStackSlot()) << cls;
6523 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6524 }
6525 __ j(kNotEqual, &loop);
6526 __ movl(out, Immediate(1));
6527 if (zero.IsLinked()) {
6528 __ jmp(&done);
6529 }
6530 break;
6531 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006532
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006533 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006534 // /* HeapReference<Class> */ out = obj->klass_
6535 GenerateReferenceLoadTwoRegisters(instruction,
6536 out_loc,
6537 obj_loc,
6538 class_offset,
6539 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006540 // Walk over the class hierarchy to find a match.
6541 NearLabel loop, success;
6542 __ Bind(&loop);
6543 if (cls.IsRegister()) {
6544 __ cmpl(out, cls.AsRegister<Register>());
6545 } else {
6546 DCHECK(cls.IsStackSlot()) << cls;
6547 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6548 }
6549 __ j(kEqual, &success);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006550 // /* HeapReference<Class> */ out = out->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006551 GenerateReferenceLoadOneRegister(instruction,
6552 out_loc,
6553 super_offset,
6554 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006555 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006556 __ testl(out, out);
6557 __ j(kNotEqual, &loop);
6558 // If `out` is null, we use it for the result, and jump to `done`.
6559 __ jmp(&done);
6560 __ Bind(&success);
6561 __ movl(out, Immediate(1));
6562 if (zero.IsLinked()) {
6563 __ jmp(&done);
6564 }
6565 break;
6566 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006567
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006568 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006569 // /* HeapReference<Class> */ out = obj->klass_
6570 GenerateReferenceLoadTwoRegisters(instruction,
6571 out_loc,
6572 obj_loc,
6573 class_offset,
6574 kCompilerReadBarrierOption);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006575 // Do an exact check.
6576 NearLabel exact_check;
6577 if (cls.IsRegister()) {
6578 __ cmpl(out, cls.AsRegister<Register>());
6579 } else {
6580 DCHECK(cls.IsStackSlot()) << cls;
6581 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6582 }
6583 __ j(kEqual, &exact_check);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006584 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006585 // /* HeapReference<Class> */ out = out->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006586 GenerateReferenceLoadOneRegister(instruction,
6587 out_loc,
6588 component_offset,
6589 maybe_temp_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006590 kCompilerReadBarrierOption);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006591 __ testl(out, out);
6592 // If `out` is null, we use it for the result, and jump to `done`.
6593 __ j(kEqual, &done);
6594 __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
6595 __ j(kNotEqual, &zero);
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006596 __ Bind(&exact_check);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006597 __ movl(out, Immediate(1));
6598 __ jmp(&done);
6599 break;
6600 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006601
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006602 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier9fd8c602016-11-14 14:38:53 -08006603 // No read barrier since the slow path will retry upon failure.
6604 // /* HeapReference<Class> */ out = obj->klass_
6605 GenerateReferenceLoadTwoRegisters(instruction,
6606 out_loc,
6607 obj_loc,
6608 class_offset,
6609 kWithoutReadBarrier);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006610 if (cls.IsRegister()) {
6611 __ cmpl(out, cls.AsRegister<Register>());
6612 } else {
6613 DCHECK(cls.IsStackSlot()) << cls;
6614 __ cmpl(out, Address(ESP, cls.GetStackIndex()));
6615 }
6616 DCHECK(locations->OnlyCallsOnSlowPath());
Roland Levillain0d5a2812015-11-13 10:07:31 +00006617 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(instruction,
6618 /* is_fatal */ false);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006619 codegen_->AddSlowPath(slow_path);
6620 __ j(kNotEqual, slow_path->GetEntryLabel());
6621 __ movl(out, Immediate(1));
6622 if (zero.IsLinked()) {
6623 __ jmp(&done);
6624 }
6625 break;
6626 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006627
Calin Juravle98893e12015-10-02 21:05:03 +01006628 case TypeCheckKind::kUnresolvedCheck:
Roland Levillain0d5a2812015-11-13 10:07:31 +00006629 case TypeCheckKind::kInterfaceCheck: {
6630 // Note that we indeed only call on slow path, but we always go
Roland Levillaine3f43ac2016-01-19 15:07:47 +00006631 // into the slow path for the unresolved and interface check
Roland Levillain0d5a2812015-11-13 10:07:31 +00006632 // cases.
6633 //
6634 // We cannot directly call the InstanceofNonTrivial runtime
6635 // entry point without resorting to a type checking slow path
6636 // here (i.e. by calling InvokeRuntime directly), as it would
6637 // require to assign fixed registers for the inputs of this
6638 // HInstanceOf instruction (following the runtime calling
6639 // convention), which might be cluttered by the potential first
6640 // read barrier emission at the beginning of this method.
Roland Levillain7c1559a2015-12-15 10:55:36 +00006641 //
6642 // TODO: Introduce a new runtime entry point taking the object
6643 // to test (instead of its class) as argument, and let it deal
6644 // with the read barrier issues. This will let us refactor this
6645 // case of the `switch` code as it was previously (with a direct
6646 // call to the runtime not using a type checking slow path).
6647 // This should also be beneficial for the other cases above.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006648 DCHECK(locations->OnlyCallsOnSlowPath());
6649 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(instruction,
6650 /* is_fatal */ false);
6651 codegen_->AddSlowPath(slow_path);
6652 __ jmp(slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006653 if (zero.IsLinked()) {
6654 __ jmp(&done);
6655 }
6656 break;
6657 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006658 }
6659
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006660 if (zero.IsLinked()) {
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006661 __ Bind(&zero);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006662 __ xorl(out, out);
6663 }
6664
6665 if (done.IsLinked()) {
6666 __ Bind(&done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006667 }
6668
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006669 if (slow_path != nullptr) {
6670 __ Bind(slow_path->GetExitLabel());
6671 }
Nicolas Geoffray6f5c41f2014-11-06 08:59:20 +00006672}
6673
Mathieu Chartier5ac321b2016-11-09 16:33:54 -08006674static bool IsTypeCheckSlowPathFatal(TypeCheckKind type_check_kind, bool throws_into_catch) {
6675 switch (type_check_kind) {
6676 case TypeCheckKind::kExactCheck:
6677 case TypeCheckKind::kAbstractClassCheck:
6678 case TypeCheckKind::kClassHierarchyCheck:
6679 case TypeCheckKind::kArrayObjectCheck:
6680 return !throws_into_catch && !kEmitCompilerReadBarrier;
6681 case TypeCheckKind::kInterfaceCheck:
6682 return !throws_into_catch && !kEmitCompilerReadBarrier && !kPoisonHeapReferences;
6683 case TypeCheckKind::kArrayCheck:
6684 case TypeCheckKind::kUnresolvedCheck:
6685 return false;
6686 }
6687 LOG(FATAL) << "Unreachable";
6688 UNREACHABLE();
6689}
6690
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006691void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006692 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006693 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Mathieu Chartier5ac321b2016-11-09 16:33:54 -08006694 LocationSummary::CallKind call_kind =
6695 IsTypeCheckSlowPathFatal(type_check_kind, throws_into_catch)
6696 ? LocationSummary::kNoCall
6697 : LocationSummary::kCallOnSlowPath;
Roland Levillain0d5a2812015-11-13 10:07:31 +00006698 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
6699 locations->SetInAt(0, Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006700 if (type_check_kind == TypeCheckKind::kInterfaceCheck) {
6701 // Require a register for the interface check since there is a loop that compares the class to
6702 // a memory address.
6703 locations->SetInAt(1, Location::RequiresRegister());
6704 } else {
6705 locations->SetInAt(1, Location::Any());
6706 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006707 // Note that TypeCheckSlowPathX86 uses this "temp" register too.
6708 locations->AddTemp(Location::RequiresRegister());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006709 // When read barriers are enabled, we need an additional temporary register for some cases.
6710 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
6711}
6712
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006713void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00006714 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006715 LocationSummary* locations = instruction->GetLocations();
Roland Levillain0d5a2812015-11-13 10:07:31 +00006716 Location obj_loc = locations->InAt(0);
6717 Register obj = obj_loc.AsRegister<Register>();
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006718 Location cls = locations->InAt(1);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006719 Location temp_loc = locations->GetTemp(0);
6720 Register temp = temp_loc.AsRegister<Register>();
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006721 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
6722 DCHECK_GE(num_temps, 1u);
6723 DCHECK_LE(num_temps, 2u);
6724 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
6725 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6726 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6727 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6728 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
6729 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
6730 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
6731 const uint32_t object_array_data_offset =
6732 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006733
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006734  // Always false for read barriers: the checks below avoid read barriers (for performance and
6735  // code size reasons), which can produce false negatives, and those cases need a non-fatal
6736  // slow path that re-checks via the runtime entrypoint.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006737 bool is_type_check_slow_path_fatal =
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006738 IsTypeCheckSlowPathFatal(type_check_kind, instruction->CanThrowIntoCatchBlock());
6739
Roland Levillain0d5a2812015-11-13 10:07:31 +00006740 SlowPathCode* type_check_slow_path =
6741 new (GetGraph()->GetArena()) TypeCheckSlowPathX86(instruction,
6742 is_type_check_slow_path_fatal);
6743 codegen_->AddSlowPath(type_check_slow_path);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006744
Roland Levillain0d5a2812015-11-13 10:07:31 +00006745 NearLabel done;
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006746 // Avoid null check if we know obj is not null.
6747 if (instruction->MustDoNullCheck()) {
6748 __ testl(obj, obj);
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006749 __ j(kEqual, &done);
Guillaume "Vermeille" Sanchezaf888352015-04-20 14:41:30 +01006750 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006751
Roland Levillain0d5a2812015-11-13 10:07:31 +00006752 switch (type_check_kind) {
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006753 case TypeCheckKind::kExactCheck:
6754 case TypeCheckKind::kArrayCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006755 // /* HeapReference<Class> */ temp = obj->klass_
6756 GenerateReferenceLoadTwoRegisters(instruction,
6757 temp_loc,
6758 obj_loc,
6759 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006760 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006761
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006762 if (cls.IsRegister()) {
6763 __ cmpl(temp, cls.AsRegister<Register>());
6764 } else {
6765 DCHECK(cls.IsStackSlot()) << cls;
6766 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
6767 }
6768 // Jump to slow path for throwing the exception or doing a
6769 // more involved array check.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006770 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006771 break;
6772 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006773
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006774 case TypeCheckKind::kAbstractClassCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006775 // /* HeapReference<Class> */ temp = obj->klass_
6776 GenerateReferenceLoadTwoRegisters(instruction,
6777 temp_loc,
6778 obj_loc,
6779 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006780 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006781
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006782 // If the class is abstract, we eagerly fetch the super class of the
6783 // object to avoid doing a comparison we know will fail.
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006784 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006785 __ Bind(&loop);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006786 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006787 GenerateReferenceLoadOneRegister(instruction,
6788 temp_loc,
6789 super_offset,
6790 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006791 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006792
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006793 // If the class reference currently in `temp` is null, jump to the slow path to throw the
6794 // exception.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006795 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006796 __ j(kZero, type_check_slow_path->GetEntryLabel());
Roland Levillain0d5a2812015-11-13 10:07:31 +00006797
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006798      // Otherwise, compare the classes.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006799 if (cls.IsRegister()) {
6800 __ cmpl(temp, cls.AsRegister<Register>());
6801 } else {
6802 DCHECK(cls.IsStackSlot()) << cls;
6803 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
6804 }
6805 __ j(kNotEqual, &loop);
6806 break;
6807 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006808
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006809 case TypeCheckKind::kClassHierarchyCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006810 // /* HeapReference<Class> */ temp = obj->klass_
6811 GenerateReferenceLoadTwoRegisters(instruction,
6812 temp_loc,
6813 obj_loc,
6814 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006815 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006816
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006817 // Walk over the class hierarchy to find a match.
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006818 NearLabel loop;
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006819 __ Bind(&loop);
6820 if (cls.IsRegister()) {
6821 __ cmpl(temp, cls.AsRegister<Register>());
6822 } else {
6823 DCHECK(cls.IsStackSlot()) << cls;
6824 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
6825 }
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006826 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006827
Roland Levillain0d5a2812015-11-13 10:07:31 +00006828 // /* HeapReference<Class> */ temp = temp->super_class_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006829 GenerateReferenceLoadOneRegister(instruction,
6830 temp_loc,
6831 super_offset,
6832 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006833 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006834
6835 // If the class reference currently in `temp` is not null, jump
6836      // back to the beginning of the loop.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006837 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006838 __ j(kNotZero, &loop);
6839      // Otherwise, jump to the slow path to throw the exception.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006840 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006841 break;
6842 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006843
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006844 case TypeCheckKind::kArrayObjectCheck: {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006845 // /* HeapReference<Class> */ temp = obj->klass_
6846 GenerateReferenceLoadTwoRegisters(instruction,
6847 temp_loc,
6848 obj_loc,
6849 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006850 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006851
Nicolas Geoffrayabfcf182015-09-21 18:41:21 +01006852 // Do an exact check.
6853 if (cls.IsRegister()) {
6854 __ cmpl(temp, cls.AsRegister<Register>());
6855 } else {
6856 DCHECK(cls.IsStackSlot()) << cls;
6857 __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
6858 }
6859 __ j(kEqual, &done);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006860
6861 // Otherwise, we need to check that the object's class is a non-primitive array.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006862 // /* HeapReference<Class> */ temp = temp->component_type_
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006863 GenerateReferenceLoadOneRegister(instruction,
6864 temp_loc,
6865 component_offset,
6866 maybe_temp2_loc,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006867 kWithoutReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00006868
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006869      // If the component type is null (i.e. the object is not an array), jump to the slow path to
6870 // throw the exception. Otherwise proceed with the check.
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006871 __ testl(temp, temp);
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006872 __ j(kZero, type_check_slow_path->GetEntryLabel());
Roland Levillain0d5a2812015-11-13 10:07:31 +00006873
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006874 __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08006875 __ j(kNotEqual, type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006876 break;
6877 }
Roland Levillain0d5a2812015-11-13 10:07:31 +00006878
Calin Juravle98893e12015-10-02 21:05:03 +01006879 case TypeCheckKind::kUnresolvedCheck:
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006880 // We always go into the type check slow path for the unresolved check case.
Roland Levillain0d5a2812015-11-13 10:07:31 +00006881 // We cannot directly call the CheckCast runtime entry point
6882 // without resorting to a type checking slow path here (i.e. by
6883 // calling InvokeRuntime directly), as it would require to
6884 // assign fixed registers for the inputs of this HInstanceOf
6885 // instruction (following the runtime calling convention), which
6886 // might be cluttered by the potential first read barrier
6887 // emission at the beginning of this method.
6888 __ jmp(type_check_slow_path->GetEntryLabel());
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006889 break;
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006890
6891 case TypeCheckKind::kInterfaceCheck: {
6892 // Fast path for the interface check. Since we compare with a memory location in the inner
6893      // loop we would need to have cls poisoned. However, unpoisoning cls would reset the
6894 // conditional flags and cause the conditional jump to be incorrect. Therefore we just jump
Mathieu Chartieraa474eb2016-11-09 15:18:27 -08006895 // to the slow path if we are running under poisoning.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006896 if (!kPoisonHeapReferences) {
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006897        // Try to avoid read barriers to improve the fast path. We cannot get false positives by
6898 // doing this.
6899 // /* HeapReference<Class> */ temp = obj->klass_
6900 GenerateReferenceLoadTwoRegisters(instruction,
6901 temp_loc,
6902 obj_loc,
6903 class_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006904 kWithoutReadBarrier);
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006905
6906 // /* HeapReference<Class> */ temp = temp->iftable_
6907 GenerateReferenceLoadTwoRegisters(instruction,
6908 temp_loc,
6909 temp_loc,
6910 iftable_offset,
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08006911 kWithoutReadBarrier);
Mathieu Chartier6beced42016-11-15 15:51:31 -08006912 // Iftable is never null.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006913 __ movl(maybe_temp2_loc.AsRegister<Register>(), Address(temp, array_length_offset));
Mathieu Chartier6beced42016-11-15 15:51:31 -08006914 // Loop through the iftable and check if any class matches.
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006915 NearLabel start_loop;
6916 __ Bind(&start_loop);
Mathieu Chartier6beced42016-11-15 15:51:31 -08006917 // Need to subtract first to handle the empty array case.
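      // Each iftable entry spans two elements (the interface class and its method array),
      // hence the stride of 2.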
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006918 __ subl(maybe_temp2_loc.AsRegister<Register>(), Immediate(2));
Mathieu Chartier6beced42016-11-15 15:51:31 -08006919 __ j(kNegative, type_check_slow_path->GetEntryLabel());
6920 // Go to next interface if the classes do not match.
6921 __ cmpl(cls.AsRegister<Register>(),
6922 CodeGeneratorX86::ArrayAddress(temp,
6923 maybe_temp2_loc,
6924 TIMES_4,
6925 object_array_data_offset));
6926 __ j(kNotEqual, &start_loop);
6927 } else {
6928 __ jmp(type_check_slow_path->GetEntryLabel());
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006929 }
Mathieu Chartier5c44c1b2016-11-04 18:13:04 -07006930 break;
6931 }
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00006932 }
6933 __ Bind(&done);
6934
Roland Levillain0d5a2812015-11-13 10:07:31 +00006935 __ Bind(type_check_slow_path->GetExitLabel());
Nicolas Geoffray57a88d42014-11-10 15:09:21 +00006936}
6937
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006938void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction) {
6939 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006940 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006941 InvokeRuntimeCallingConvention calling_convention;
6942 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6943}
6944
6945void InstructionCodeGeneratorX86::VisitMonitorOperation(HMonitorOperation* instruction) {
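  // Both monitorenter and monitorexit are delegated entirely to the runtime; IsEnter()
  // selects which entrypoint to call.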
Serban Constantinescuba45db02016-07-12 22:53:02 +01006946 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject
6947 : kQuickUnlockObject,
Alexandre Rames8158f282015-08-07 10:26:17 +01006948 instruction,
Serban Constantinescuba45db02016-07-12 22:53:02 +01006949 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006950 if (instruction->IsEnter()) {
6951 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6952 } else {
6953 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6954 }
Nicolas Geoffrayb7baf5c2014-11-11 16:29:44 +00006955}
6956
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006957void LocationsBuilderX86::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
6958void LocationsBuilderX86::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
6959void LocationsBuilderX86::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
6960
6961void LocationsBuilderX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
6962 LocationSummary* locations =
6963 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
6964 DCHECK(instruction->GetResultType() == Primitive::kPrimInt
6965 || instruction->GetResultType() == Primitive::kPrimLong);
6966 locations->SetInAt(0, Location::RequiresRegister());
6967 locations->SetInAt(1, Location::Any());
6968 locations->SetOut(Location::SameAsFirstInput());
6969}
6970
6971void InstructionCodeGeneratorX86::VisitAnd(HAnd* instruction) {
6972 HandleBitwiseOperation(instruction);
6973}
6974
6975void InstructionCodeGeneratorX86::VisitOr(HOr* instruction) {
6976 HandleBitwiseOperation(instruction);
6977}
6978
6979void InstructionCodeGeneratorX86::VisitXor(HXor* instruction) {
6980 HandleBitwiseOperation(instruction);
6981}
6982
6983void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
6984 LocationSummary* locations = instruction->GetLocations();
6985 Location first = locations->InAt(0);
6986 Location second = locations->InAt(1);
6987 DCHECK(first.Equals(locations->Out()));
6988
6989 if (instruction->GetResultType() == Primitive::kPrimInt) {
6990 if (second.IsRegister()) {
6991 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006992 __ andl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006993 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00006994 __ orl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006995 } else {
6996 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00006997 __ xorl(first.AsRegister<Register>(), second.AsRegister<Register>());
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00006998 }
6999 } else if (second.IsConstant()) {
7000 if (instruction->IsAnd()) {
Roland Levillain199f3362014-11-27 17:15:16 +00007001 __ andl(first.AsRegister<Register>(),
7002 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007003 } else if (instruction->IsOr()) {
Roland Levillain199f3362014-11-27 17:15:16 +00007004 __ orl(first.AsRegister<Register>(),
7005 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007006 } else {
7007 DCHECK(instruction->IsXor());
Roland Levillain199f3362014-11-27 17:15:16 +00007008 __ xorl(first.AsRegister<Register>(),
7009 Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007010 }
7011 } else {
7012 if (instruction->IsAnd()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007013 __ andl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007014 } else if (instruction->IsOr()) {
Roland Levillain271ab9c2014-11-27 15:23:57 +00007015 __ orl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007016 } else {
7017 DCHECK(instruction->IsXor());
Roland Levillain271ab9c2014-11-27 15:23:57 +00007018 __ xorl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007019 }
7020 }
7021 } else {
7022 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
7023 if (second.IsRegisterPair()) {
7024 if (instruction->IsAnd()) {
7025 __ andl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
7026 __ andl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
7027 } else if (instruction->IsOr()) {
7028 __ orl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
7029 __ orl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
7030 } else {
7031 DCHECK(instruction->IsXor());
7032 __ xorl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
7033 __ xorl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
7034 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007035 } else if (second.IsDoubleStackSlot()) {
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007036 if (instruction->IsAnd()) {
7037 __ andl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
7038 __ andl(first.AsRegisterPairHigh<Register>(),
7039 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
7040 } else if (instruction->IsOr()) {
7041 __ orl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
7042 __ orl(first.AsRegisterPairHigh<Register>(),
7043 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
7044 } else {
7045 DCHECK(instruction->IsXor());
7046 __ xorl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
7047 __ xorl(first.AsRegisterPairHigh<Register>(),
7048 Address(ESP, second.GetHighStackIndex(kX86WordSize)));
7049 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007050 } else {
7051 DCHECK(second.IsConstant()) << second;
7052 int64_t value = second.GetConstant()->AsLongConstant()->GetValue();
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007053 int32_t low_value = Low32Bits(value);
7054 int32_t high_value = High32Bits(value);
7055 Immediate low(low_value);
7056 Immediate high(high_value);
7057 Register first_low = first.AsRegisterPairLow<Register>();
7058 Register first_high = first.AsRegisterPairHigh<Register>();
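      // Fold the constant per 32-bit half: AND with 0 clears the register via xorl, while
      // AND with -1, OR with 0, and XOR with 0 are no-ops and emit nothing.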
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007059 if (instruction->IsAnd()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007060 if (low_value == 0) {
7061 __ xorl(first_low, first_low);
7062 } else if (low_value != -1) {
7063 __ andl(first_low, low);
7064 }
7065 if (high_value == 0) {
7066 __ xorl(first_high, first_high);
7067 } else if (high_value != -1) {
7068 __ andl(first_high, high);
7069 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007070 } else if (instruction->IsOr()) {
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007071 if (low_value != 0) {
7072 __ orl(first_low, low);
7073 }
7074 if (high_value != 0) {
7075 __ orl(first_high, high);
7076 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007077 } else {
7078 DCHECK(instruction->IsXor());
Mark Mendell3f6c7f62015-03-13 13:47:53 -04007079 if (low_value != 0) {
7080 __ xorl(first_low, low);
7081 }
7082 if (high_value != 0) {
7083 __ xorl(first_high, high);
7084 }
Nicolas Geoffray234d69d2015-03-09 10:28:50 +00007085 }
Nicolas Geoffray9574c4b2014-11-12 13:19:37 +00007086 }
7087 }
7088}
7089
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007090void InstructionCodeGeneratorX86::GenerateReferenceLoadOneRegister(
7091 HInstruction* instruction,
7092 Location out,
7093 uint32_t offset,
7094 Location maybe_temp,
7095 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007096 Register out_reg = out.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007097 if (read_barrier_option == kWithReadBarrier) {
7098 CHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007099 if (kUseBakerReadBarrier) {
7100 // Load with fast path based Baker's read barrier.
7101 // /* HeapReference<Object> */ out = *(out + offset)
7102 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00007103 instruction, out, out_reg, offset, /* needs_null_check */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007104 } else {
7105 // Load with slow path based read barrier.
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007106 // Save the value of `out` into `maybe_temp` before overwriting it
Roland Levillain7c1559a2015-12-15 10:55:36 +00007107 // in the following move operation, as we will need it for the
7108 // read barrier below.
Vladimir Marko953437b2016-08-24 08:30:46 +00007109 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007110 __ movl(maybe_temp.AsRegister<Register>(), out_reg);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007111 // /* HeapReference<Object> */ out = *(out + offset)
7112 __ movl(out_reg, Address(out_reg, offset));
Roland Levillain95e7ffc2016-01-22 11:57:25 +00007113 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007114 }
7115 } else {
7116 // Plain load with no read barrier.
7117 // /* HeapReference<Object> */ out = *(out + offset)
7118 __ movl(out_reg, Address(out_reg, offset));
7119 __ MaybeUnpoisonHeapReference(out_reg);
7120 }
7121}
7122
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007123void InstructionCodeGeneratorX86::GenerateReferenceLoadTwoRegisters(
7124 HInstruction* instruction,
7125 Location out,
7126 Location obj,
7127 uint32_t offset,
7128 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007129 Register out_reg = out.AsRegister<Register>();
7130 Register obj_reg = obj.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007131 if (read_barrier_option == kWithReadBarrier) {
7132 CHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007133 if (kUseBakerReadBarrier) {
7134 // Load with fast path based Baker's read barrier.
7135 // /* HeapReference<Object> */ out = *(obj + offset)
7136 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00007137 instruction, out, obj_reg, offset, /* needs_null_check */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007138 } else {
7139 // Load with slow path based read barrier.
7140 // /* HeapReference<Object> */ out = *(obj + offset)
7141 __ movl(out_reg, Address(obj_reg, offset));
7142 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
7143 }
7144 } else {
7145 // Plain load with no read barrier.
7146 // /* HeapReference<Object> */ out = *(obj + offset)
7147 __ movl(out_reg, Address(obj_reg, offset));
7148 __ MaybeUnpoisonHeapReference(out_reg);
7149 }
7150}
7151
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007152void InstructionCodeGeneratorX86::GenerateGcRootFieldLoad(
7153 HInstruction* instruction,
7154 Location root,
7155 const Address& address,
7156 Label* fixup_label,
7157 ReadBarrierOption read_barrier_option) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007158 Register root_reg = root.AsRegister<Register>();
Mathieu Chartier3af00dc2016-11-10 11:25:57 -08007159 if (read_barrier_option == kWithReadBarrier) {
Mathieu Chartier31b12e32016-09-02 17:11:57 -07007160 DCHECK(kEmitCompilerReadBarrier);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007161 if (kUseBakerReadBarrier) {
7162 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
7163      // Baker's read barriers are used:
7164 //
Roland Levillaind966ce72017-02-09 16:20:14 +00007165 // root = obj.field;
7166 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
7167 // if (temp != null) {
7168 // root = temp(root)
Roland Levillain7c1559a2015-12-15 10:55:36 +00007169 // }
7170
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007171 // /* GcRoot<mirror::Object> */ root = *address
7172 __ movl(root_reg, address);
7173 if (fixup_label != nullptr) {
7174 __ Bind(fixup_label);
7175 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007176 static_assert(
7177 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
7178 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
7179 "have different sizes.");
7180 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
7181 "art::mirror::CompressedReference<mirror::Object> and int32_t "
7182 "have different sizes.");
7183
Vladimir Marko953437b2016-08-24 08:30:46 +00007184 // Slow path marking the GC root `root`.
7185 SlowPathCode* slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86(
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007186 instruction, root, /* unpoison_ref_before_marking */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007187 codegen_->AddSlowPath(slow_path);
7188
Roland Levillaind966ce72017-02-09 16:20:14 +00007189 // Test the entrypoint (`Thread::Current()->pReadBarrierMarkReg ## root.reg()`).
7190 const int32_t entry_point_offset =
7191 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86PointerSize>(root.reg());
7192 __ fs()->cmpl(Address::Absolute(entry_point_offset), Immediate(0));
7193 // The entrypoint is null when the GC is not marking.
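      // (The runtime installs the per-register mark entrypoints only while marking is in
      // progress, so this check also serves as the "is the GC marking?" test.)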
Roland Levillain7c1559a2015-12-15 10:55:36 +00007194 __ j(kNotEqual, slow_path->GetEntryLabel());
7195 __ Bind(slow_path->GetExitLabel());
7196 } else {
7197 // GC root loaded through a slow path for read barriers other
7198 // than Baker's.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007199 // /* GcRoot<mirror::Object>* */ root = address
7200 __ leal(root_reg, address);
7201 if (fixup_label != nullptr) {
7202 __ Bind(fixup_label);
7203 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007204 // /* mirror::Object* */ root = root->Read()
7205 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
7206 }
7207 } else {
7208 // Plain GC root load with no read barrier.
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007209 // /* GcRoot<mirror::Object> */ root = *address
7210 __ movl(root_reg, address);
7211 if (fixup_label != nullptr) {
7212 __ Bind(fixup_label);
7213 }
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007214 // Note that GC roots are not affected by heap poisoning, thus we
7215 // do not have to unpoison `root_reg` here.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007216 }
7217}
7218
7219void CodeGeneratorX86::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
7220 Location ref,
7221 Register obj,
7222 uint32_t offset,
Roland Levillain7c1559a2015-12-15 10:55:36 +00007223 bool needs_null_check) {
7224 DCHECK(kEmitCompilerReadBarrier);
7225 DCHECK(kUseBakerReadBarrier);
7226
7227 // /* HeapReference<Object> */ ref = *(obj + offset)
7228 Address src(obj, offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007229 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007230}
7231
7232void CodeGeneratorX86::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
7233 Location ref,
7234 Register obj,
7235 uint32_t data_offset,
7236 Location index,
Roland Levillain7c1559a2015-12-15 10:55:36 +00007237 bool needs_null_check) {
7238 DCHECK(kEmitCompilerReadBarrier);
7239 DCHECK(kUseBakerReadBarrier);
7240
Roland Levillain3d312422016-06-23 13:53:42 +01007241 static_assert(
7242 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
7243 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
Roland Levillain7c1559a2015-12-15 10:55:36 +00007244 // /* HeapReference<Object> */ ref =
7245 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007246 Address src = CodeGeneratorX86::ArrayAddress(obj, index, TIMES_4, data_offset);
Vladimir Marko953437b2016-08-24 08:30:46 +00007247 GenerateReferenceLoadWithBakerReadBarrier(instruction, ref, obj, src, needs_null_check);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007248}
7249
7250void CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
7251 Location ref,
7252 Register obj,
7253 const Address& src,
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007254 bool needs_null_check,
7255 bool always_update_field,
7256 Register* temp) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007257 DCHECK(kEmitCompilerReadBarrier);
7258 DCHECK(kUseBakerReadBarrier);
7259
7260 // In slow path based read barriers, the read barrier call is
7261 // inserted after the original load. However, in fast path based
7262 // Baker's read barriers, we need to perform the load of
7263 // mirror::Object::monitor_ *before* the original reference load.
7264 // This load-load ordering is required by the read barrier.
7265 // The fast path/slow path (for Baker's algorithm) should look like:
7266 //
7267 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
7268 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
7269 // HeapReference<Object> ref = *src; // Original reference load.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007270 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain7c1559a2015-12-15 10:55:36 +00007271 // if (is_gray) {
7272 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
7273 // }
7274 //
7275 // Note: the original implementation in ReadBarrier::Barrier is
7276 // slightly more complex as:
7277 // - it implements the load-load fence using a data dependency on
Roland Levillaine3f43ac2016-01-19 15:07:47 +00007278 // the high-bits of rb_state, which are expected to be all zeroes
7279 // (we use CodeGeneratorX86::GenerateMemoryBarrier instead here,
7280 // which is a no-op thanks to the x86 memory model);
Roland Levillain7c1559a2015-12-15 10:55:36 +00007281 // - it performs additional checks that we do not do here for
7282 // performance reasons.
7283
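  // Concretely, the code emitted below is roughly (illustrative sketch):
  //   testb $gray_bit, gray_byte(obj)   // gray check on the lock word; sets the flags
  //   movl  ref, src                    // original reference load; leaves the flags intact
  //   jnz   mark_slow_path              // if the reference was gray, mark it in the slow path
  //   // fast path: unpoison `ref` if heap poisoning is enabled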
7284 Register ref_reg = ref.AsRegister<Register>();
Roland Levillain7c1559a2015-12-15 10:55:36 +00007285 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
7286
Vladimir Marko953437b2016-08-24 08:30:46 +00007287 // Given the numeric representation, it's enough to check the low bit of the rb_state.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007288 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
7289 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Vladimir Marko953437b2016-08-24 08:30:46 +00007290 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
7291 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
7292 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
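  // For instance, if LockWord::kReadBarrierStateShift were 28 (an illustrative value; the
  // real one is defined by LockWord), gray_byte_position would be 3, gray_bit_position
  // would be 4 and test_value would be 0x10, i.e. the testb below would examine bit 28 of
  // the lock word.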
7293
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07007294 // if (rb_state == ReadBarrier::GrayState())
Vladimir Marko953437b2016-08-24 08:30:46 +00007295 // ref = ReadBarrier::Mark(ref);
7296 // At this point, just do the "if" and make sure that flags are preserved until the branch.
7297 __ testb(Address(obj, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain7c1559a2015-12-15 10:55:36 +00007298 if (needs_null_check) {
7299 MaybeRecordImplicitNullCheck(instruction);
7300 }
Roland Levillain7c1559a2015-12-15 10:55:36 +00007301
7302 // Load fence to prevent load-load reordering.
7303 // Note that this is a no-op, thanks to the x86 memory model.
7304 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
7305
7306 // The actual reference load.
7307 // /* HeapReference<Object> */ ref = *src
Vladimir Marko953437b2016-08-24 08:30:46 +00007308 __ movl(ref_reg, src); // Flags are unaffected.
7309
7310  // Note: Reference unpoisoning modifies the flags, so we need to delay it until after the branch.
7311 // Slow path marking the object `ref` when it is gray.
Roland Levillaina1aa3b12016-10-26 13:03:38 +01007312 SlowPathCode* slow_path;
7313 if (always_update_field) {
7314 DCHECK(temp != nullptr);
7315 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkAndUpdateFieldSlowPathX86(
7316 instruction, ref, obj, src, /* unpoison_ref_before_marking */ true, *temp);
7317 } else {
7318 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathX86(
7319 instruction, ref, /* unpoison_ref_before_marking */ true);
7320 }
Vladimir Marko953437b2016-08-24 08:30:46 +00007321 AddSlowPath(slow_path);
7322
7323 // We have done the "if" of the gray bit check above, now branch based on the flags.
7324 __ j(kNotZero, slow_path->GetEntryLabel());
Roland Levillain7c1559a2015-12-15 10:55:36 +00007325
7326 // Object* ref = ref_addr->AsMirrorPtr()
7327 __ MaybeUnpoisonHeapReference(ref_reg);
7328
Roland Levillain7c1559a2015-12-15 10:55:36 +00007329 __ Bind(slow_path->GetExitLabel());
7330}
7331
7332void CodeGeneratorX86::GenerateReadBarrierSlow(HInstruction* instruction,
7333 Location out,
7334 Location ref,
7335 Location obj,
7336 uint32_t offset,
7337 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007338 DCHECK(kEmitCompilerReadBarrier);
7339
Roland Levillain7c1559a2015-12-15 10:55:36 +00007340 // Insert a slow path based read barrier *after* the reference load.
7341 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007342 // If heap poisoning is enabled, the unpoisoning of the loaded
7343 // reference will be carried out by the runtime within the slow
7344 // path.
7345 //
7346 // Note that `ref` currently does not get unpoisoned (when heap
7347 // poisoning is enabled), which is alright as the `ref` argument is
7348 // not used by the artReadBarrierSlow entry point.
7349 //
7350 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
7351 SlowPathCode* slow_path = new (GetGraph()->GetArena())
7352 ReadBarrierForHeapReferenceSlowPathX86(instruction, out, ref, obj, offset, index);
7353 AddSlowPath(slow_path);
7354
Roland Levillain0d5a2812015-11-13 10:07:31 +00007355 __ jmp(slow_path->GetEntryLabel());
7356 __ Bind(slow_path->GetExitLabel());
7357}
7358
Roland Levillain7c1559a2015-12-15 10:55:36 +00007359void CodeGeneratorX86::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7360 Location out,
7361 Location ref,
7362 Location obj,
7363 uint32_t offset,
7364 Location index) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007365 if (kEmitCompilerReadBarrier) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007366 // Baker's read barriers shall be handled by the fast path
7367 // (CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier).
7368 DCHECK(!kUseBakerReadBarrier);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007369 // If heap poisoning is enabled, unpoisoning will be taken care of
7370 // by the runtime within the slow path.
Roland Levillain7c1559a2015-12-15 10:55:36 +00007371 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
Roland Levillain0d5a2812015-11-13 10:07:31 +00007372 } else if (kPoisonHeapReferences) {
7373 __ UnpoisonHeapReference(out.AsRegister<Register>());
7374 }
7375}
7376
Roland Levillain7c1559a2015-12-15 10:55:36 +00007377void CodeGeneratorX86::GenerateReadBarrierForRootSlow(HInstruction* instruction,
7378 Location out,
7379 Location root) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00007380 DCHECK(kEmitCompilerReadBarrier);
7381
Roland Levillain7c1559a2015-12-15 10:55:36 +00007382 // Insert a slow path based read barrier *after* the GC root load.
7383 //
Roland Levillain0d5a2812015-11-13 10:07:31 +00007384 // Note that GC roots are not affected by heap poisoning, so we do
7385 // not need to do anything special for this here.
7386 SlowPathCode* slow_path =
7387 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathX86(instruction, out, root);
7388 AddSlowPath(slow_path);
7389
Roland Levillain0d5a2812015-11-13 10:07:31 +00007390 __ jmp(slow_path->GetEntryLabel());
7391 __ Bind(slow_path->GetExitLabel());
7392}
7393
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007394void LocationsBuilderX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007395 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007396 LOG(FATAL) << "Unreachable";
7397}
7398
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01007399void InstructionCodeGeneratorX86::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
Calin Juravleb1498f62015-02-16 13:13:29 +00007400 // Nothing to do, this should be removed during prepare for register allocator.
Calin Juravleb1498f62015-02-16 13:13:29 +00007401 LOG(FATAL) << "Unreachable";
7402}
7403
Mark Mendellfe57faa2015-09-18 09:26:15 -04007404// Simple implementation of packed switch - generate cascaded compare/jumps.
7405void LocationsBuilderX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7406 LocationSummary* locations =
7407 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
7408 locations->SetInAt(0, Location::RequiresRegister());
7409}
7410
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007411void InstructionCodeGeneratorX86::GenPackedSwitchWithCompares(Register value_reg,
7412 int32_t lower_bound,
7413 uint32_t num_entries,
7414 HBasicBlock* switch_block,
7415 HBasicBlock* default_block) {
7416 // Figure out the correct compare values and jump conditions.
7417 // Handle the first compare/branch as a special case because it might
7418 // jump to the default case.
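  // For example (illustrative), with lower_bound == 0 and num_entries == 4 the code below
  // emits roughly:
  //   cmpl value, 1;  jb L0;  je L1
  //   cmpl value, 3;  jb L2;  je L3
  //   jmp  default    // unless the default block is the fall-through successor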
7419 DCHECK_GT(num_entries, 2u);
7420 Condition first_condition;
7421 uint32_t index;
7422 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
7423 if (lower_bound != 0) {
7424 first_condition = kLess;
7425 __ cmpl(value_reg, Immediate(lower_bound));
7426 __ j(first_condition, codegen_->GetLabelOf(default_block));
7427 __ j(kEqual, codegen_->GetLabelOf(successors[0]));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007428
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007429 index = 1;
7430 } else {
7431 // Handle all the compare/jumps below.
7432 first_condition = kBelow;
7433 index = 0;
7434 }
7435
7436 // Handle the rest of the compare/jumps.
7437 for (; index + 1 < num_entries; index += 2) {
7438 int32_t compare_to_value = lower_bound + index + 1;
7439 __ cmpl(value_reg, Immediate(compare_to_value));
7440 // Jump to successors[index] if value < case_value[index].
7441 __ j(first_condition, codegen_->GetLabelOf(successors[index]));
7442 // Jump to successors[index + 1] if value == case_value[index + 1].
7443 __ j(kEqual, codegen_->GetLabelOf(successors[index + 1]));
7444 }
7445
7446 if (index != num_entries) {
7447 // There are an odd number of entries. Handle the last one.
7448 DCHECK_EQ(index + 1, num_entries);
7449 __ cmpl(value_reg, Immediate(lower_bound + index));
7450 __ j(kEqual, codegen_->GetLabelOf(successors[index]));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007451 }
7452
7453 // And the default for any other value.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007454 if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
7455 __ jmp(codegen_->GetLabelOf(default_block));
Mark Mendellfe57faa2015-09-18 09:26:15 -04007456 }
7457}
7458
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007459void InstructionCodeGeneratorX86::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7460 int32_t lower_bound = switch_instr->GetStartValue();
7461 uint32_t num_entries = switch_instr->GetNumEntries();
7462 LocationSummary* locations = switch_instr->GetLocations();
7463 Register value_reg = locations->InAt(0).AsRegister<Register>();
7464
7465 GenPackedSwitchWithCompares(value_reg,
7466 lower_bound,
7467 num_entries,
7468 switch_instr->GetBlock(),
7469 switch_instr->GetDefaultBlock());
7470}
7471
Mark Mendell805b3b52015-09-18 14:10:29 -04007472void LocationsBuilderX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
7473 LocationSummary* locations =
7474 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
7475 locations->SetInAt(0, Location::RequiresRegister());
7476
7477 // Constant area pointer.
7478 locations->SetInAt(1, Location::RequiresRegister());
7479
7480 // And the temporary we need.
7481 locations->AddTemp(Location::RequiresRegister());
7482}
7483
7484void InstructionCodeGeneratorX86::VisitX86PackedSwitch(HX86PackedSwitch* switch_instr) {
7485 int32_t lower_bound = switch_instr->GetStartValue();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007486 uint32_t num_entries = switch_instr->GetNumEntries();
Mark Mendell805b3b52015-09-18 14:10:29 -04007487 LocationSummary* locations = switch_instr->GetLocations();
7488 Register value_reg = locations->InAt(0).AsRegister<Register>();
7489 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7490
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007491 if (num_entries <= kPackedSwitchJumpTableThreshold) {
7492 GenPackedSwitchWithCompares(value_reg,
7493 lower_bound,
7494 num_entries,
7495 switch_instr->GetBlock(),
7496 default_block);
7497 return;
7498 }
7499
Mark Mendell805b3b52015-09-18 14:10:29 -04007500  // Otherwise, use a jump table stored in the constant area.
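  // The emitted sequence is roughly (illustrative):
  //   leal temp, [value - lower_bound]   // only when lower_bound != 0
  //   cmpl temp, num_entries - 1
  //   ja   default
  //   movl temp, jump_table(constant_area, temp, 4)
  //   addl temp, constant_area
  //   jmp  temp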
7501 Register temp_reg = locations->GetTemp(0).AsRegister<Register>();
7502 Register constant_area = locations->InAt(1).AsRegister<Register>();
7503
7504 // Remove the bias, if needed.
7505 if (lower_bound != 0) {
7506 __ leal(temp_reg, Address(value_reg, -lower_bound));
7507 value_reg = temp_reg;
7508 }
7509
7510 // Is the value in range?
Vladimir Markof3e0ee22015-12-17 15:23:13 +00007511 DCHECK_GE(num_entries, 1u);
Mark Mendell805b3b52015-09-18 14:10:29 -04007512 __ cmpl(value_reg, Immediate(num_entries - 1));
7513 __ j(kAbove, codegen_->GetLabelOf(default_block));
7514
7515 // We are in the range of the table.
7516 // Load (target-constant_area) from the jump table, indexing by the value.
7517 __ movl(temp_reg, codegen_->LiteralCaseTable(switch_instr, constant_area, value_reg));
7518
7519 // Compute the actual target address by adding in constant_area.
7520 __ addl(temp_reg, constant_area);
7521
7522 // And jump.
7523 __ jmp(temp_reg);
7524}
7525
Mark Mendell0616ae02015-04-17 12:49:27 -04007526void LocationsBuilderX86::VisitX86ComputeBaseMethodAddress(
7527 HX86ComputeBaseMethodAddress* insn) {
7528 LocationSummary* locations =
7529 new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
7530 locations->SetOut(Location::RequiresRegister());
7531}
7532
7533void InstructionCodeGeneratorX86::VisitX86ComputeBaseMethodAddress(
7534 HX86ComputeBaseMethodAddress* insn) {
7535 LocationSummary* locations = insn->GetLocations();
7536 Register reg = locations->Out().AsRegister<Register>();
7537
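  // A call pushes the address of the instruction that follows it, so calling the very
  // next instruction and popping the pushed value materializes the current code address
  // in `reg` (the usual x86-32 get-PC idiom, as EIP cannot be read directly).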
7538 // Generate call to next instruction.
7539 Label next_instruction;
7540 __ call(&next_instruction);
7541 __ Bind(&next_instruction);
7542
7543  // Remember this offset for later use with the constant area.
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007544 codegen_->AddMethodAddressOffset(insn, GetAssembler()->CodeSize());
Mark Mendell0616ae02015-04-17 12:49:27 -04007545
7546 // Grab the return address off the stack.
7547 __ popl(reg);
7548}
7549
7550void LocationsBuilderX86::VisitX86LoadFromConstantTable(
7551 HX86LoadFromConstantTable* insn) {
7552 LocationSummary* locations =
7553 new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
7554
7555 locations->SetInAt(0, Location::RequiresRegister());
7556 locations->SetInAt(1, Location::ConstantLocation(insn->GetConstant()));
7557
7558  // If the constant does not need to be materialized, we only need the inputs to be set.
David Brazdilb3e773e2016-01-26 11:28:37 +00007559 if (insn->IsEmittedAtUseSite()) {
Mark Mendell0616ae02015-04-17 12:49:27 -04007560 return;
7561 }
7562
7563 switch (insn->GetType()) {
7564 case Primitive::kPrimFloat:
7565 case Primitive::kPrimDouble:
7566 locations->SetOut(Location::RequiresFpuRegister());
7567 break;
7568
7569 case Primitive::kPrimInt:
7570 locations->SetOut(Location::RequiresRegister());
7571 break;
7572
7573 default:
7574 LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
7575 }
7576}
7577
7578void InstructionCodeGeneratorX86::VisitX86LoadFromConstantTable(HX86LoadFromConstantTable* insn) {
David Brazdilb3e773e2016-01-26 11:28:37 +00007579 if (insn->IsEmittedAtUseSite()) {
Mark Mendell0616ae02015-04-17 12:49:27 -04007580 return;
7581 }
7582
7583 LocationSummary* locations = insn->GetLocations();
7584 Location out = locations->Out();
7585 Register const_area = locations->InAt(0).AsRegister<Register>();
7586 HConstant *value = insn->GetConstant();
7587
7588 switch (insn->GetType()) {
7589 case Primitive::kPrimFloat:
7590 __ movss(out.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007591 codegen_->LiteralFloatAddress(
7592 value->AsFloatConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
Mark Mendell0616ae02015-04-17 12:49:27 -04007593 break;
7594
7595 case Primitive::kPrimDouble:
7596 __ movsd(out.AsFpuRegister<XmmRegister>(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007597 codegen_->LiteralDoubleAddress(
7598 value->AsDoubleConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
Mark Mendell0616ae02015-04-17 12:49:27 -04007599 break;
7600
7601 case Primitive::kPrimInt:
7602 __ movl(out.AsRegister<Register>(),
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007603 codegen_->LiteralInt32Address(
7604 value->AsIntConstant()->GetValue(), insn->GetBaseMethodAddress(), const_area));
Mark Mendell0616ae02015-04-17 12:49:27 -04007605 break;
7606
7607 default:
7608 LOG(FATAL) << "Unsupported x86 constant area type " << insn->GetType();
7609 }
7610}
7611
Mark Mendell0616ae02015-04-17 12:49:27 -04007612/**
7613 * Class to handle late fixup of offsets into the constant area.
7614 */
Vladimir Marko5233f932015-09-29 19:01:15 +01007615class RIPFixup : public AssemblerFixup, public ArenaObject<kArenaAllocCodeGenerator> {
Mark Mendell0616ae02015-04-17 12:49:27 -04007616 public:
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007617 RIPFixup(CodeGeneratorX86& codegen,
7618 HX86ComputeBaseMethodAddress* base_method_address,
7619 size_t offset)
7620 : codegen_(&codegen),
7621 base_method_address_(base_method_address),
7622 offset_into_constant_area_(offset) {}
Mark Mendell805b3b52015-09-18 14:10:29 -04007623
7624 protected:
7625 void SetOffset(size_t offset) { offset_into_constant_area_ = offset; }
7626
7627 CodeGeneratorX86* codegen_;
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007628 HX86ComputeBaseMethodAddress* base_method_address_;
Mark Mendell0616ae02015-04-17 12:49:27 -04007629
7630 private:
7631 void Process(const MemoryRegion& region, int pos) OVERRIDE {
7632 // Patch the correct offset for the instruction. The place to patch is the
7633 // last 4 bytes of the instruction.
7634    // The value to patch is the distance of the target offset in the constant area
7635    // from the address computed by the HX86ComputeBaseMethodAddress instruction.
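    // For example (illustrative): if the constant area starts at code offset 0x100, the
    // referenced entry is 8 bytes into it, and the base-address instruction recorded
    // offset 0x20, the stored displacement is 0x108 - 0x20 = 0xE8.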
Mark Mendell805b3b52015-09-18 14:10:29 -04007636 int32_t constant_offset = codegen_->ConstantAreaStart() + offset_into_constant_area_;
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007637 int32_t relative_position =
7638 constant_offset - codegen_->GetMethodAddressOffset(base_method_address_);
Mark Mendell0616ae02015-04-17 12:49:27 -04007639
7640 // Patch in the right value.
7641 region.StoreUnaligned<int32_t>(pos - 4, relative_position);
7642 }
7643
Mark Mendell0616ae02015-04-17 12:49:27 -04007644  // Location in the constant area that the fixup refers to.
Mark Mendell805b3b52015-09-18 14:10:29 -04007645 int32_t offset_into_constant_area_;
Mark Mendell0616ae02015-04-17 12:49:27 -04007646};
7647
Mark Mendell805b3b52015-09-18 14:10:29 -04007648/**
7649 * Class to handle late fixup of offsets to a jump table that will be created in the
7650 * constant area.
7651 */
7652class JumpTableRIPFixup : public RIPFixup {
7653 public:
7654 JumpTableRIPFixup(CodeGeneratorX86& codegen, HX86PackedSwitch* switch_instr)
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007655 : RIPFixup(codegen, switch_instr->GetBaseMethodAddress(), static_cast<size_t>(-1)),
7656 switch_instr_(switch_instr) {}
Mark Mendell805b3b52015-09-18 14:10:29 -04007657
7658 void CreateJumpTable() {
7659 X86Assembler* assembler = codegen_->GetAssembler();
7660
7661 // Ensure that the reference to the jump table has the correct offset.
7662 const int32_t offset_in_constant_table = assembler->ConstantAreaSize();
7663 SetOffset(offset_in_constant_table);
7664
7665 // The label values in the jump table are computed relative to the
7666 // instruction addressing the constant area.
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007667 const int32_t relative_offset = codegen_->GetMethodAddressOffset(base_method_address_);
Mark Mendell805b3b52015-09-18 14:10:29 -04007668
7669    // Populate the jump table with the offsets to the successor blocks.
7670 int32_t num_entries = switch_instr_->GetNumEntries();
7671 HBasicBlock* block = switch_instr_->GetBlock();
7672 const ArenaVector<HBasicBlock*>& successors = block->GetSuccessors();
7673    // Each entry is the target block's offset relative to the address computed by the base method address instruction.
7674 for (int32_t i = 0; i < num_entries; i++) {
7675 HBasicBlock* b = successors[i];
7676 Label* l = codegen_->GetLabelOf(b);
7677 DCHECK(l->IsBound());
7678 int32_t offset_to_block = l->Position() - relative_offset;
7679 assembler->AppendInt32(offset_to_block);
7680 }
7681 }
7682
7683 private:
7684 const HX86PackedSwitch* switch_instr_;
7685};
7686
7687void CodeGeneratorX86::Finalize(CodeAllocator* allocator) {
7688 // Generate the constant area if needed.
7689 X86Assembler* assembler = GetAssembler();
7690 if (!assembler->IsConstantAreaEmpty() || !fixups_to_jump_tables_.empty()) {
7691    // Align to a 4-byte boundary to reduce cache misses, as the data consists of
7692    // 4- and 8-byte values.
7693 assembler->Align(4, 0);
7694 constant_area_start_ = assembler->CodeSize();
7695
7696 // Populate any jump tables.
7697 for (auto jump_table : fixups_to_jump_tables_) {
7698 jump_table->CreateJumpTable();
7699 }
7700
7701 // And now add the constant area to the generated code.
7702 assembler->AddConstantArea();
7703 }
7704
7705 // And finish up.
7706 CodeGenerator::Finalize(allocator);
7707}
7708
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007709Address CodeGeneratorX86::LiteralDoubleAddress(double v,
7710 HX86ComputeBaseMethodAddress* method_base,
7711 Register reg) {
7712 AssemblerFixup* fixup =
7713 new (GetGraph()->GetArena()) RIPFixup(*this, method_base, __ AddDouble(v));
Mark Mendell0616ae02015-04-17 12:49:27 -04007714 return Address(reg, kDummy32BitOffset, fixup);
7715}
7716
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007717Address CodeGeneratorX86::LiteralFloatAddress(float v,
7718 HX86ComputeBaseMethodAddress* method_base,
7719 Register reg) {
7720 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, method_base, __ AddFloat(v));
Mark Mendell0616ae02015-04-17 12:49:27 -04007721 return Address(reg, kDummy32BitOffset, fixup);
7722}
7723
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007724Address CodeGeneratorX86::LiteralInt32Address(int32_t v,
7725 HX86ComputeBaseMethodAddress* method_base,
7726 Register reg) {
7727 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, method_base, __ AddInt32(v));
Mark Mendell0616ae02015-04-17 12:49:27 -04007728 return Address(reg, kDummy32BitOffset, fixup);
7729}
7730
Nicolas Geoffray133719e2017-01-22 15:44:39 +00007731Address CodeGeneratorX86::LiteralInt64Address(int64_t v,
7732 HX86ComputeBaseMethodAddress* method_base,
7733 Register reg) {
7734 AssemblerFixup* fixup = new (GetGraph()->GetArena()) RIPFixup(*this, method_base, __ AddInt64(v));
Mark Mendell0616ae02015-04-17 12:49:27 -04007735 return Address(reg, kDummy32BitOffset, fixup);
7736}
7737
Aart Bika19616e2016-02-01 18:57:58 -08007738void CodeGeneratorX86::Load32BitValue(Register dest, int32_t value) {
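  // Note: xorl of a register with itself is a shorter encoding than movl with a zero
  // immediate (unlike movl, it also clobbers the flags).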
7739 if (value == 0) {
7740 __ xorl(dest, dest);
7741 } else {
7742 __ movl(dest, Immediate(value));
7743 }
7744}
7745
7746void CodeGeneratorX86::Compare32BitValue(Register dest, int32_t value) {
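  // Note: testl of a register with itself sets the flags the same way as a comparison
  // against zero, with a shorter encoding than cmpl with an immediate.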
7747 if (value == 0) {
7748 __ testl(dest, dest);
7749 } else {
7750 __ cmpl(dest, Immediate(value));
7751 }
7752}
7753
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007754void CodeGeneratorX86::GenerateIntCompare(Location lhs, Location rhs) {
7755 Register lhs_reg = lhs.AsRegister<Register>();
jessicahandojo4877b792016-09-08 19:49:13 -07007756 GenerateIntCompare(lhs_reg, rhs);
7757}
7758
7759void CodeGeneratorX86::GenerateIntCompare(Register lhs, Location rhs) {
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007760 if (rhs.IsConstant()) {
7761 int32_t value = CodeGenerator::GetInt32ValueOf(rhs.GetConstant());
jessicahandojo4877b792016-09-08 19:49:13 -07007762 Compare32BitValue(lhs, value);
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007763 } else if (rhs.IsStackSlot()) {
jessicahandojo4877b792016-09-08 19:49:13 -07007764 __ cmpl(lhs, Address(ESP, rhs.GetStackIndex()));
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007765 } else {
jessicahandojo4877b792016-09-08 19:49:13 -07007766 __ cmpl(lhs, rhs.AsRegister<Register>());
Vladimir Marko56f4bdd2016-09-16 11:32:36 +01007767 }
7768}
7769
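// For a constant index the scaled index is folded into the displacement, e.g.
// (illustrative) index 3 with TIMES_4 and data_offset 12 yields Address(obj, 24);
// for a register index this yields Address(obj, index_reg, TIMES_4, data_offset).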
7770Address CodeGeneratorX86::ArrayAddress(Register obj,
7771 Location index,
7772 ScaleFactor scale,
7773 uint32_t data_offset) {
7774 return index.IsConstant() ?
7775 Address(obj, (index.GetConstant()->AsIntConstant()->GetValue() << scale) + data_offset) :
7776 Address(obj, index.AsRegister<Register>(), scale, data_offset);
7777}
7778
Mark Mendell805b3b52015-09-18 14:10:29 -04007779Address CodeGeneratorX86::LiteralCaseTable(HX86PackedSwitch* switch_instr,
7780 Register reg,
7781 Register value) {
7782  // Create a fixup that will be used both to build the jump table and to address it.
7783 JumpTableRIPFixup* table_fixup =
7784 new (GetGraph()->GetArena()) JumpTableRIPFixup(*this, switch_instr);
7785
7786 // We have to populate the jump tables.
7787 fixups_to_jump_tables_.push_back(table_fixup);
7788
7789 // We want a scaled address, as we are extracting the correct offset from the table.
7790 return Address(reg, value, TIMES_4, kDummy32BitOffset, table_fixup);
7791}
7792
Andreas Gampe85b62f22015-09-09 13:15:38 -07007793// TODO: target as memory.
7794void CodeGeneratorX86::MoveFromReturnRegister(Location target, Primitive::Type type) {
7795 if (!target.IsValid()) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00007796 DCHECK_EQ(type, Primitive::kPrimVoid);
Andreas Gampe85b62f22015-09-09 13:15:38 -07007797 return;
7798 }
7799
7800 DCHECK_NE(type, Primitive::kPrimVoid);
7801
7802 Location return_loc = InvokeDexCallingConventionVisitorX86().GetReturnLocation(type);
7803 if (target.Equals(return_loc)) {
7804 return;
7805 }
7806
7807 // TODO: Consider pairs in the parallel move resolver, then this could be nicely merged
7808 // with the else branch.
7809 if (type == Primitive::kPrimLong) {
7810 HParallelMove parallel_move(GetGraph()->GetArena());
7811 parallel_move.AddMove(return_loc.ToLow(), target.ToLow(), Primitive::kPrimInt, nullptr);
7812 parallel_move.AddMove(return_loc.ToHigh(), target.ToHigh(), Primitive::kPrimInt, nullptr);
7813 GetMoveResolver()->EmitNativeCode(&parallel_move);
7814 } else {
7815 // Let the parallel move resolver take care of all of this.
7816 HParallelMove parallel_move(GetGraph()->GetArena());
7817 parallel_move.AddMove(return_loc, target, type, nullptr);
7818 GetMoveResolver()->EmitNativeCode(&parallel_move);
7819 }
7820}
7821
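// Patch the 32-bit literal recorded for this root load so that it holds the address of
// the root's entry in the JIT roots table starting at `roots_data`.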
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007822void CodeGeneratorX86::PatchJitRootUse(uint8_t* code,
7823 const uint8_t* roots_data,
7824 const PatchInfo<Label>& info,
7825 uint64_t index_in_table) const {
7826 uint32_t code_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
7827 uintptr_t address =
7828 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
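  // The literal is not guaranteed to be 4-byte aligned within the generated code, so use
  // an explicitly unaligned store.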
7829 typedef __attribute__((__aligned__(1))) uint32_t unaligned_uint32_t;
7830 reinterpret_cast<unaligned_uint32_t*>(code + code_offset)[0] =
7831 dchecked_integral_cast<uint32_t>(address);
7832}
7833
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007834void CodeGeneratorX86::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
7835 for (const PatchInfo<Label>& info : jit_string_patches_) {
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007836 const auto& it = jit_string_roots_.find(
7837 StringReference(&info.dex_file, dex::StringIndex(info.index)));
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007838 DCHECK(it != jit_string_roots_.end());
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007839 PatchJitRootUse(code, roots_data, info, it->second);
7840 }
7841
7842 for (const PatchInfo<Label>& info : jit_class_patches_) {
7843 const auto& it = jit_class_roots_.find(
7844 TypeReference(&info.dex_file, dex::TypeIndex(info.index)));
7845 DCHECK(it != jit_class_roots_.end());
7846 PatchJitRootUse(code, roots_data, info, it->second);
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007847 }
7848}
7849
Roland Levillain4d027112015-07-01 15:41:14 +01007850#undef __
7851
Nicolas Geoffrayd4dd2552014-02-28 10:23:58 +00007852} // namespace x86
7853} // namespace art