blob: 950759b5763769782f84278b30dc554637ef115d [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunzec857c742015-09-23 15:12:39 -070019#include "art_method.h"
20#include "code_generator_utils.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070021#include "entrypoints/quick/quick_entrypoints.h"
22#include "entrypoints/quick/quick_entrypoints_enum.h"
23#include "gc/accounting/card_table.h"
24#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070025#include "intrinsics_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070026#include "mirror/array-inl.h"
27#include "mirror/class-inl.h"
28#include "offsets.h"
29#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070030#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070031#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070032#include "utils/stack_checks.h"
33
34namespace art {
35namespace mips64 {
36
37static constexpr int kCurrentMethodStackOffset = 0;
38static constexpr GpuRegister kMethodRegisterArgument = A0;
39
Alexey Frunze4dda3372015-06-01 18:31:49 -070040Location Mips64ReturnLocation(Primitive::Type return_type) {
41 switch (return_type) {
42 case Primitive::kPrimBoolean:
43 case Primitive::kPrimByte:
44 case Primitive::kPrimChar:
45 case Primitive::kPrimShort:
46 case Primitive::kPrimInt:
47 case Primitive::kPrimNot:
48 case Primitive::kPrimLong:
49 return Location::RegisterLocation(V0);
50
51 case Primitive::kPrimFloat:
52 case Primitive::kPrimDouble:
53 return Location::FpuRegisterLocation(F0);
54
55 case Primitive::kPrimVoid:
56 return Location();
57 }
58 UNREACHABLE();
59}
60
61Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(Primitive::Type type) const {
62 return Mips64ReturnLocation(type);
63}
64
65Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
66 return Location::RegisterLocation(kMethodRegisterArgument);
67}
68
69Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(Primitive::Type type) {
70 Location next_location;
71 if (type == Primitive::kPrimVoid) {
72 LOG(FATAL) << "Unexpected parameter type " << type;
73 }
74
75 if (Primitive::IsFloatingPointType(type) &&
76 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
77 next_location = Location::FpuRegisterLocation(
78 calling_convention.GetFpuRegisterAt(float_index_++));
79 gp_index_++;
80 } else if (!Primitive::IsFloatingPointType(type) &&
81 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
82 next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
83 float_index_++;
84 } else {
85 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
86 next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
87 : Location::StackSlot(stack_offset);
88 }
89
90 // Space on the stack is reserved for all arguments.
91 stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;
92
93 // TODO: review
94
95 // TODO: shouldn't we use a whole machine word per argument on the stack?
96 // Implicit 4-byte method pointer (and such) will cause misalignment.
97
98 return next_location;
99}
100
101Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
102 return Mips64ReturnLocation(type);
103}
104
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100105// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
106#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700107#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700108
109class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
110 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000111 explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700112
113 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100114 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700115 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
116 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000117 if (instruction_->CanThrowIntoCatchBlock()) {
118 // Live registers will be restored in the catch block if caught.
119 SaveLiveRegisters(codegen, instruction_->GetLocations());
120 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700121 // We're moving two locations to locations that could overlap, so we need a parallel
122 // move resolver.
123 InvokeRuntimeCallingConvention calling_convention;
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100124 codegen->EmitParallelMoves(locations->InAt(0),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700125 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
126 Primitive::kPrimInt,
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100127 locations->InAt(1),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700128 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
129 Primitive::kPrimInt);
Serban Constantinescufc734082016-07-19 17:18:07 +0100130 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
131 ? kQuickThrowStringBounds
132 : kQuickThrowArrayBounds;
133 mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100134 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700135 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
136 }
137
Alexandre Rames8158f282015-08-07 10:26:17 +0100138 bool IsFatal() const OVERRIDE { return true; }
139
Roland Levillain46648892015-06-19 16:07:18 +0100140 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }
141
Alexey Frunze4dda3372015-06-01 18:31:49 -0700142 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700143 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
144};
145
146class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
147 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000148 explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700149
150 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
151 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
152 __ Bind(GetEntryLabel());
Serban Constantinescufc734082016-07-19 17:18:07 +0100153 mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700154 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
155 }
156
Alexandre Rames8158f282015-08-07 10:26:17 +0100157 bool IsFatal() const OVERRIDE { return true; }
158
Roland Levillain46648892015-06-19 16:07:18 +0100159 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }
160
Alexey Frunze4dda3372015-06-01 18:31:49 -0700161 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700162 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
163};
164
165class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
166 public:
167 LoadClassSlowPathMIPS64(HLoadClass* cls,
168 HInstruction* at,
169 uint32_t dex_pc,
170 bool do_clinit)
David Srbecky9cd6d372016-02-09 15:24:47 +0000171 : SlowPathCodeMIPS64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700172 DCHECK(at->IsLoadClass() || at->IsClinitCheck());
173 }
174
175 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
176 LocationSummary* locations = at_->GetLocations();
177 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
178
179 __ Bind(GetEntryLabel());
180 SaveLiveRegisters(codegen, locations);
181
182 InvokeRuntimeCallingConvention calling_convention;
Andreas Gampea5b09a62016-11-17 15:21:22 -0800183 __ LoadConst32(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex().index_);
Serban Constantinescufc734082016-07-19 17:18:07 +0100184 QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
185 : kQuickInitializeType;
186 mips64_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700187 if (do_clinit_) {
188 CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
189 } else {
190 CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
191 }
192
193 // Move the class to the desired location.
194 Location out = locations->Out();
195 if (out.IsValid()) {
196 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
197 Primitive::Type type = at_->GetType();
198 mips64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
199 }
200
201 RestoreLiveRegisters(codegen, locations);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700202 __ Bc(GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700203 }
204
Roland Levillain46648892015-06-19 16:07:18 +0100205 const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }
206
Alexey Frunze4dda3372015-06-01 18:31:49 -0700207 private:
208 // The class this slow path will load.
209 HLoadClass* const cls_;
210
211 // The instruction where this slow path is happening.
212 // (Might be the load class or an initialization check).
213 HInstruction* const at_;
214
215 // The dex PC of `at_`.
216 const uint32_t dex_pc_;
217
218 // Whether to initialize the class.
219 const bool do_clinit_;
220
221 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
222};
223
224class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
225 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000226 explicit LoadStringSlowPathMIPS64(HLoadString* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700227
228 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
229 LocationSummary* locations = instruction_->GetLocations();
230 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
231 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
232
233 __ Bind(GetEntryLabel());
234 SaveLiveRegisters(codegen, locations);
235
236 InvokeRuntimeCallingConvention calling_convention;
Andreas Gampe8a0128a2016-11-28 07:38:35 -0800237 const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex().index_;
David Srbecky9cd6d372016-02-09 15:24:47 +0000238 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index);
Serban Constantinescufc734082016-07-19 17:18:07 +0100239 mips64_codegen->InvokeRuntime(kQuickResolveString,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700240 instruction_,
241 instruction_->GetDexPc(),
242 this);
243 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
244 Primitive::Type type = instruction_->GetType();
245 mips64_codegen->MoveLocation(locations->Out(),
246 calling_convention.GetReturnLocation(type),
247 type);
248
249 RestoreLiveRegisters(codegen, locations);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700250 __ Bc(GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700251 }
252
Roland Levillain46648892015-06-19 16:07:18 +0100253 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }
254
Alexey Frunze4dda3372015-06-01 18:31:49 -0700255 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700256 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
257};
258
259class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
260 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000261 explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700262
263 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
264 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
265 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000266 if (instruction_->CanThrowIntoCatchBlock()) {
267 // Live registers will be restored in the catch block if caught.
268 SaveLiveRegisters(codegen, instruction_->GetLocations());
269 }
Serban Constantinescufc734082016-07-19 17:18:07 +0100270 mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700271 instruction_,
272 instruction_->GetDexPc(),
273 this);
274 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
275 }
276
Alexandre Rames8158f282015-08-07 10:26:17 +0100277 bool IsFatal() const OVERRIDE { return true; }
278
Roland Levillain46648892015-06-19 16:07:18 +0100279 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }
280
Alexey Frunze4dda3372015-06-01 18:31:49 -0700281 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700282 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
283};
284
285class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
286 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100287 SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000288 : SlowPathCodeMIPS64(instruction), successor_(successor) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700289
290 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
291 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
292 __ Bind(GetEntryLabel());
Serban Constantinescufc734082016-07-19 17:18:07 +0100293 mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700294 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700295 if (successor_ == nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700296 __ Bc(GetReturnLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700297 } else {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700298 __ Bc(mips64_codegen->GetLabelOf(successor_));
Alexey Frunze4dda3372015-06-01 18:31:49 -0700299 }
300 }
301
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700302 Mips64Label* GetReturnLabel() {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700303 DCHECK(successor_ == nullptr);
304 return &return_label_;
305 }
306
Roland Levillain46648892015-06-19 16:07:18 +0100307 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }
308
Alexey Frunze4dda3372015-06-01 18:31:49 -0700309 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700310 // If not null, the block to branch to after the suspend check.
311 HBasicBlock* const successor_;
312
313 // If `successor_` is null, the label to branch to after the suspend check.
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700314 Mips64Label return_label_;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700315
316 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
317};
318
319class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
320 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000321 explicit TypeCheckSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700322
323 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
324 LocationSummary* locations = instruction_->GetLocations();
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800325
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100326 uint32_t dex_pc = instruction_->GetDexPc();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700327 DCHECK(instruction_->IsCheckCast()
328 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
329 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
330
331 __ Bind(GetEntryLabel());
332 SaveLiveRegisters(codegen, locations);
333
334 // We're moving two locations to locations that could overlap, so we need a parallel
335 // move resolver.
336 InvokeRuntimeCallingConvention calling_convention;
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800337 codegen->EmitParallelMoves(locations->InAt(0),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700338 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
339 Primitive::kPrimNot,
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800340 locations->InAt(1),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700341 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
342 Primitive::kPrimNot);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700343 if (instruction_->IsInstanceOf()) {
Serban Constantinescufc734082016-07-19 17:18:07 +0100344 mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
Mathieu Chartier9fd8c602016-11-14 14:38:53 -0800345 CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700346 Primitive::Type ret_type = instruction_->GetType();
347 Location ret_loc = calling_convention.GetReturnLocation(ret_type);
348 mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700349 } else {
350 DCHECK(instruction_->IsCheckCast());
Mathieu Chartierb99f4d62016-11-07 16:17:26 -0800351 mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
352 CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700353 }
354
355 RestoreLiveRegisters(codegen, locations);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700356 __ Bc(GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700357 }
358
Roland Levillain46648892015-06-19 16:07:18 +0100359 const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }
360
Alexey Frunze4dda3372015-06-01 18:31:49 -0700361 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700362 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
363};
364
365class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
366 public:
Aart Bik42249c32016-01-07 15:33:50 -0800367 explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000368 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700369
370 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800371 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700372 __ Bind(GetEntryLabel());
Serban Constantinescufc734082016-07-19 17:18:07 +0100373 mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Roland Levillain888d0672015-11-23 18:53:50 +0000374 CheckEntrypointTypes<kQuickDeoptimize, void, void>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700375 }
376
Roland Levillain46648892015-06-19 16:07:18 +0100377 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }
378
Alexey Frunze4dda3372015-06-01 18:31:49 -0700379 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700380 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
381};
382
383CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
384 const Mips64InstructionSetFeatures& isa_features,
Serban Constantinescuecc43662015-08-13 13:33:12 +0100385 const CompilerOptions& compiler_options,
386 OptimizingCompilerStats* stats)
Alexey Frunze4dda3372015-06-01 18:31:49 -0700387 : CodeGenerator(graph,
388 kNumberOfGpuRegisters,
389 kNumberOfFpuRegisters,
Roland Levillain0d5a2812015-11-13 10:07:31 +0000390 /* number_of_register_pairs */ 0,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700391 ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
392 arraysize(kCoreCalleeSaves)),
393 ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
394 arraysize(kFpuCalleeSaves)),
Serban Constantinescuecc43662015-08-13 13:33:12 +0100395 compiler_options,
396 stats),
Vladimir Marko225b6462015-09-28 12:17:40 +0100397 block_labels_(nullptr),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700398 location_builder_(graph, this),
399 instruction_visitor_(graph, this),
400 move_resolver_(graph->GetArena(), this),
Vladimir Marko93205e32016-04-13 11:59:46 +0100401 assembler_(graph->GetArena()),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700402 isa_features_(isa_features) {
403 // Save RA (containing the return address) to mimic Quick.
404 AddAllocatedRegister(Location::RegisterLocation(RA));
405}
406
407#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100408// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
409#define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700410#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700411
412void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700413 // Ensure that we fix up branches.
414 __ FinalizeCode();
415
416 // Adjust native pc offsets in stack maps.
417 for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
418 uint32_t old_position = stack_map_stream_.GetStackMap(i).native_pc_offset;
419 uint32_t new_position = __ GetAdjustedPosition(old_position);
420 DCHECK_GE(new_position, old_position);
421 stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
422 }
423
424 // Adjust pc offsets for the disassembly information.
425 if (disasm_info_ != nullptr) {
426 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
427 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
428 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
429 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
430 it.second.start = __ GetAdjustedPosition(it.second.start);
431 it.second.end = __ GetAdjustedPosition(it.second.end);
432 }
433 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
434 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
435 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
436 }
437 }
438
Alexey Frunze4dda3372015-06-01 18:31:49 -0700439 CodeGenerator::Finalize(allocator);
440}
441
442Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
443 return codegen_->GetAssembler();
444}
445
446void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +0100447 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -0700448 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
449}
450
451void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +0100452 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -0700453 codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
454}
455
456void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
457 // Pop reg
458 __ Ld(GpuRegister(reg), SP, 0);
Lazar Trsicd9672662015-09-03 17:33:01 +0200459 __ DecreaseFrameSize(kMips64DoublewordSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700460}
461
462void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
463 // Push reg
Lazar Trsicd9672662015-09-03 17:33:01 +0200464 __ IncreaseFrameSize(kMips64DoublewordSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700465 __ Sd(GpuRegister(reg), SP, 0);
466}
467
468void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
469 LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
470 StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
471 // Allocate a scratch register other than TMP, if available.
472 // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
473 // automatically unspilled when the scratch scope object is destroyed).
474 ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
475 // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
Lazar Trsicd9672662015-09-03 17:33:01 +0200476 int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700477 __ LoadFromOffset(load_type,
478 GpuRegister(ensure_scratch.GetRegister()),
479 SP,
480 index1 + stack_offset);
481 __ LoadFromOffset(load_type,
482 TMP,
483 SP,
484 index2 + stack_offset);
485 __ StoreToOffset(store_type,
486 GpuRegister(ensure_scratch.GetRegister()),
487 SP,
488 index2 + stack_offset);
489 __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
490}
491
492static dwarf::Reg DWARFReg(GpuRegister reg) {
493 return dwarf::Reg::Mips64Core(static_cast<int>(reg));
494}
495
David Srbeckyba702002016-02-01 18:15:29 +0000496static dwarf::Reg DWARFReg(FpuRegister reg) {
497 return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
498}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700499
500void CodeGeneratorMIPS64::GenerateFrameEntry() {
501 __ Bind(&frame_entry_label_);
502
503 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips64) || !IsLeafMethod();
504
505 if (do_overflow_check) {
506 __ LoadFromOffset(kLoadWord,
507 ZERO,
508 SP,
509 -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips64)));
510 RecordPcInfo(nullptr, 0);
511 }
512
513 // TODO: anything related to T9/GP/GOT/PIC/.so's?
514
515 if (HasEmptyFrame()) {
516 return;
517 }
518
519 // Make sure the frame size isn't unreasonably large. Per the various APIs
520 // it looks like it should always be less than 2GB in size, which allows
521 // us using 32-bit signed offsets from the stack pointer.
522 if (GetFrameSize() > 0x7FFFFFFF)
523 LOG(FATAL) << "Stack frame larger than 2GB";
524
525 // Spill callee-saved registers.
526 // Note that their cumulative size is small and they can be indexed using
527 // 16-bit offsets.
528
529 // TODO: increment/decrement SP in one step instead of two or remove this comment.
530
531 uint32_t ofs = FrameEntrySpillSize();
532 __ IncreaseFrameSize(ofs);
533
534 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
535 GpuRegister reg = kCoreCalleeSaves[i];
536 if (allocated_registers_.ContainsCoreRegister(reg)) {
Lazar Trsicd9672662015-09-03 17:33:01 +0200537 ofs -= kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700538 __ Sd(reg, SP, ofs);
539 __ cfi().RelOffset(DWARFReg(reg), ofs);
540 }
541 }
542
543 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
544 FpuRegister reg = kFpuCalleeSaves[i];
545 if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
Lazar Trsicd9672662015-09-03 17:33:01 +0200546 ofs -= kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700547 __ Sdc1(reg, SP, ofs);
David Srbeckyba702002016-02-01 18:15:29 +0000548 __ cfi().RelOffset(DWARFReg(reg), ofs);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700549 }
550 }
551
552 // Allocate the rest of the frame and store the current method pointer
553 // at its end.
554
555 __ IncreaseFrameSize(GetFrameSize() - FrameEntrySpillSize());
556
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +0100557 // Save the current method if we need it. Note that we do not
558 // do this in HCurrentMethod, as the instruction might have been removed
559 // in the SSA graph.
560 if (RequiresCurrentMethod()) {
561 static_assert(IsInt<16>(kCurrentMethodStackOffset),
562 "kCurrentMethodStackOffset must fit into int16_t");
563 __ Sd(kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
564 }
Goran Jakovljevicc6418422016-12-05 16:31:55 +0100565
566 if (GetGraph()->HasShouldDeoptimizeFlag()) {
567 // Initialize should_deoptimize flag to 0.
568 __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
569 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700570}
571
572void CodeGeneratorMIPS64::GenerateFrameExit() {
573 __ cfi().RememberState();
574
575 // TODO: anything related to T9/GP/GOT/PIC/.so's?
576
577 if (!HasEmptyFrame()) {
578 // Deallocate the rest of the frame.
579
580 __ DecreaseFrameSize(GetFrameSize() - FrameEntrySpillSize());
581
582 // Restore callee-saved registers.
583 // Note that their cumulative size is small and they can be indexed using
584 // 16-bit offsets.
585
586 // TODO: increment/decrement SP in one step instead of two or remove this comment.
587
588 uint32_t ofs = 0;
589
590 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
591 FpuRegister reg = kFpuCalleeSaves[i];
592 if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
593 __ Ldc1(reg, SP, ofs);
Lazar Trsicd9672662015-09-03 17:33:01 +0200594 ofs += kMips64DoublewordSize;
David Srbeckyba702002016-02-01 18:15:29 +0000595 __ cfi().Restore(DWARFReg(reg));
Alexey Frunze4dda3372015-06-01 18:31:49 -0700596 }
597 }
598
599 for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
600 GpuRegister reg = kCoreCalleeSaves[i];
601 if (allocated_registers_.ContainsCoreRegister(reg)) {
602 __ Ld(reg, SP, ofs);
Lazar Trsicd9672662015-09-03 17:33:01 +0200603 ofs += kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700604 __ cfi().Restore(DWARFReg(reg));
605 }
606 }
607
608 DCHECK_EQ(ofs, FrameEntrySpillSize());
609 __ DecreaseFrameSize(ofs);
610 }
611
612 __ Jr(RA);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700613 __ Nop();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700614
615 __ cfi().RestoreState();
616 __ cfi().DefCFAOffset(GetFrameSize());
617}
618
619void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
620 __ Bind(GetLabelOf(block));
621}
622
// Emits code moving a value between two arbitrary locations: GPR, FPR,
// constant, or (double) stack slot. `dst_type` selects 32- vs 64-bit moves
// and whether a register destination is a GPR or an FPR; it must not be
// kPrimVoid (see the DCHECK below). AT and TMP are used as scratch.
void CodeGeneratorMIPS64::MoveLocation(Location destination,
                                       Location source,
                                       Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);
  // Callers are currently required to pass a concrete type, so the
  // `unspecified_type` inference branches below are effectively dead.
  DCHECK_EQ(unspecified_type, false);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 64bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    // The register kind of the destination must agree with the type.
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      // Move to GPR/FPR from stack
      LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
      if (Primitive::IsFloatingPointType(dst_type)) {
        __ LoadFpuFromOffset(load_type,
                             destination.AsFpuRegister<FpuRegister>(),
                             SP,
                             source.GetStackIndex());
      } else {
        // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
        __ LoadFromOffset(load_type,
                          destination.AsRegister<GpuRegister>(),
                          SP,
                          source.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to GPR/FPR from constant
      // For an FPR destination the bit pattern is first built in the scratch
      // register AT, then transferred with Mtc1/Dmtc1 below.
      GpuRegister gpr = AT;
      if (!Primitive::IsFloatingPointType(dst_type)) {
        gpr = destination.AsRegister<GpuRegister>();
      }
      if (dst_type == Primitive::kPrimInt || dst_type == Primitive::kPrimFloat) {
        int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          // +0.0f has an all-zero bit pattern; transfer straight from ZERO.
          gpr = ZERO;
        } else {
          __ LoadConst32(gpr, value);
        }
      } else {
        int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          // +0.0 has an all-zero bit pattern; transfer straight from ZERO.
          gpr = ZERO;
        } else {
          __ LoadConst64(gpr, value);
        }
      }
      if (dst_type == Primitive::kPrimFloat) {
        __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      } else if (dst_type == Primitive::kPrimDouble) {
        __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      }
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        // Move to GPR from GPR
        __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
      } else {
        DCHECK(destination.IsFpuRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        } else {
          __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        }
      }
    } else if (source.IsFpuRegister()) {
      if (destination.IsFpuRegister()) {
        // Move to FPR from FPR
        if (dst_type == Primitive::kPrimFloat) {
          __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          DCHECK_EQ(dst_type, Primitive::kPrimDouble);
          __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      } else {
        // Move to GPR from FPR.
        DCHECK(destination.IsRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      // Slot size and register kind must both agree with the type.
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      // Move to stack from GPR/FPR
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      if (source.IsRegister()) {
        __ StoreToOffset(store_type,
                         source.AsRegister<GpuRegister>(),
                         SP,
                         destination.GetStackIndex());
      } else {
        __ StoreFpuToOffset(store_type,
                            source.AsFpuRegister<FpuRegister>(),
                            SP,
                            destination.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to stack from constant
      HConstant* src_cst = source.GetConstant();
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      // Zero constants are stored directly from the ZERO register; anything
      // else is materialized in TMP first.
      GpuRegister gpr = ZERO;
      if (destination.IsStackSlot()) {
        int32_t value = GetInt32ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst32(gpr, value);
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        int64_t value = GetInt64ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst64(gpr, value);
        }
      }
      __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
      // Move to stack from stack
      if (destination.IsStackSlot()) {
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
      } else {
        __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
      }
    }
  }
}
785
// Exchanges the contents of two non-constant locations of the same size.
// Supported pairs: GPR/GPR, FPR/FPR, register/stack-slot, and slot/slot
// (delegated to the move resolver). TMP and FTMP serve as scratch for the
// three-move exchange.
void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, Primitive::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs; `type` picks single- vs double-precision moves.
    FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
    FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, r1);
      __ MovS(r1, r2);
      __ MovS(r2, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, r1);
      __ MovD(r1, r2);
      __ MovD(r2, FTMP);
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
    // Memory value is parked in TMP, then the register is spilled to the
    // slot before TMP is moved into the register.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
    if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    // Slot/slot swaps are handled by the parallel-move resolver.
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}
851
Calin Juravle175dc732015-08-25 15:42:32 +0100852void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
853 DCHECK(location.IsRegister());
854 __ LoadConst32(location.AsRegister<GpuRegister>(), value);
855}
856
Calin Juravlee460d1d2015-09-29 04:52:17 +0100857void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
858 if (location.IsRegister()) {
859 locations->AddTemp(location);
860 } else {
861 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
862 }
863}
864
// Marks the card covering `object` as dirty after a reference store, so the
// GC rescans it. When `value_can_be_null` is set, the mark is skipped for
// null stores (storing null creates no old-to-young reference).
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    __ Beqzc(value, &done);
  }
  // card = thread-local card table base.
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  // temp = card + (object >> kCardShift), i.e. the card entry for `object`.
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  // The byte stored is the low 8 bits of the card table base register —
  // see gc::accounting::CardTable for why this value marks the card dirty.
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
885
David Brazdil58282f42016-01-14 12:45:10 +0000886void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700887 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
888 blocked_core_registers_[ZERO] = true;
889 blocked_core_registers_[K0] = true;
890 blocked_core_registers_[K1] = true;
891 blocked_core_registers_[GP] = true;
892 blocked_core_registers_[SP] = true;
893 blocked_core_registers_[RA] = true;
894
Lazar Trsicd9672662015-09-03 17:33:01 +0200895 // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
896 // registers (similar to how AT is used by MIPS assemblers).
Alexey Frunze4dda3372015-06-01 18:31:49 -0700897 blocked_core_registers_[AT] = true;
898 blocked_core_registers_[TMP] = true;
Lazar Trsicd9672662015-09-03 17:33:01 +0200899 blocked_core_registers_[TMP2] = true;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700900 blocked_fpu_registers_[FTMP] = true;
901
902 // Reserve suspend and thread registers.
903 blocked_core_registers_[S0] = true;
904 blocked_core_registers_[TR] = true;
905
906 // Reserve T9 for function calls
907 blocked_core_registers_[T9] = true;
908
909 // TODO: review; anything else?
910
Goran Jakovljevic782be112016-06-21 12:39:04 +0200911 if (GetGraph()->IsDebuggable()) {
912 // Stubs do not save callee-save floating point registers. If the graph
913 // is debuggable, we need to deal with these registers differently. For
914 // now, just block them.
915 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
916 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
917 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700918 }
919}
920
Alexey Frunze4dda3372015-06-01 18:31:49 -0700921size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
922 __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +0200923 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700924}
925
926size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
927 __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +0200928 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700929}
930
931size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
932 __ StoreFpuToOffset(kStoreDoubleword, FpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +0200933 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700934}
935
936size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
937 __ LoadFpuFromOffset(kLoadDoubleword, FpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +0200938 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700939}
940
941void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +0100942 stream << GpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700943}
944
945void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +0100946 stream << FpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700947}
948
// Emits a call into the quick runtime entrypoint `entrypoint`. The
// trampoline address is loaded from the thread register and called through
// T9, the conventional MIPS register for indirect calls. A stack map is
// recorded at the return address when the entrypoint requires one.
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  // TODO: anything related to T9/GP/GOT/PIC/.so's?
  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  __ Jalr(T9);
  __ Nop();  // Fill the JALR branch delay slot.
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
965
// Branches to `slow_path` unless the status of the class in `class_reg` is
// at least kStatusInitialized; the slow path performs/waits for the class
// initialization and returns to the exit label bound here.
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Bltc(TMP, AT, slow_path->GetEntryLabel());  // status < initialized -> slow path.
  // TODO: barrier needed?
  __ Bind(slow_path->GetExitLabel());
}
974
// Emits a memory barrier. The requested kind is ignored: a SYNC with
// stype 0 is emitted for every barrier kind.
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}
978
// Emits a suspend check: tests the thread's flags halfword and enters the
// slow path when any flag is set. With a `successor` block the check is
// fused into the branch to that block; otherwise control returns inline.
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS64(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    // Standalone check: take the slow path on pending flags, resume here.
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Fused with the branch: fall through to the slow path on pending flags.
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
998
// Caches the code generator and its assembler (used via the __ macro).
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1004
1005void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
1006 DCHECK_EQ(instruction->InputCount(), 2U);
1007 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1008 Primitive::Type type = instruction->GetResultType();
1009 switch (type) {
1010 case Primitive::kPrimInt:
1011 case Primitive::kPrimLong: {
1012 locations->SetInAt(0, Location::RequiresRegister());
1013 HInstruction* right = instruction->InputAt(1);
1014 bool can_use_imm = false;
1015 if (right->IsConstant()) {
1016 int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
1017 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
1018 can_use_imm = IsUint<16>(imm);
1019 } else if (instruction->IsAdd()) {
1020 can_use_imm = IsInt<16>(imm);
1021 } else {
1022 DCHECK(instruction->IsSub());
1023 can_use_imm = IsInt<16>(-imm);
1024 }
1025 }
1026 if (can_use_imm)
1027 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1028 else
1029 locations->SetInAt(1, Location::RequiresRegister());
1030 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1031 }
1032 break;
1033
1034 case Primitive::kPrimFloat:
1035 case Primitive::kPrimDouble:
1036 locations->SetInAt(0, Location::RequiresFpuRegister());
1037 locations->SetInAt(1, Location::RequiresFpuRegister());
1038 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1039 break;
1040
1041 default:
1042 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
1043 }
1044}
1045
// Emits the instruction(s) for a two-input arithmetic/logical op, honoring
// the register/immediate choices made by LocationsBuilderMIPS64::HandleBinaryOp.
// 32-bit (int/float) and 64-bit (long/double) instruction forms are selected
// from the operation type.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, rhs_imm);
          else
            __ Addu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, rhs_imm);
          else
            __ Daddu(dst, lhs, rhs_reg);
        }
      } else {
        DCHECK(instruction->IsSub());
        // There is no subtract-immediate; add the negated immediate instead.
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, -rhs_imm);
          else
            __ Subu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, -rhs_imm);
          else
            __ Dsubu(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == Primitive::kPrimFloat)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == Primitive::kPrimFloat)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
1133
1134void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08001135 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001136
1137 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1138 Primitive::Type type = instr->GetResultType();
1139 switch (type) {
1140 case Primitive::kPrimInt:
1141 case Primitive::kPrimLong: {
1142 locations->SetInAt(0, Location::RequiresRegister());
1143 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001144 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001145 break;
1146 }
1147 default:
1148 LOG(FATAL) << "Unexpected shift type " << type;
1149 }
1150}
1151
// Emits code for shl/shr/ushr/ror on int or long values. Constant distances
// are masked to the type width and encoded directly; distances >= 32 on
// longs use the dedicated "+32" instruction forms. Variable distances use
// the *v register-shift instructions.
void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        // Mask the distance to 5 (int) or 6 (long) bits, matching Java
        // shift semantics.
        uint32_t shift_value = rhs_imm &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          // A zero-distance shift is just a (possibly elided) register copy.
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == Primitive::kPrimInt) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            // Distances 32..63 only fit the 32-plus-shift instruction forms.
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        if (type == Primitive::kPrimInt) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
1244
// Add shares its location setup with the other binary ops.
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
1248
// Add shares its code emission with the other binary ops.
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
1252
// And shares its location setup with the other binary ops.
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
1256
// And shares its code emission with the other binary ops.
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
1260
1261void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
1262 LocationSummary* locations =
1263 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1264 locations->SetInAt(0, Location::RequiresRegister());
1265 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1266 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1267 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1268 } else {
1269 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1270 }
1271}
1272
// Emits the load of an array element. For a constant index the element is
// addressed as obj + (index << scale) + data_offset directly; for a register
// index the address obj + scaled index is first computed into TMP. The load
// width and signedness follow the element type.
void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  // Offset of the first element past the array header.
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  Primitive::Type type = instruction->GetType();
  switch (type) {
    case Primitive::kPrimBoolean: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        // TMP = obj + (index << 1).
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
        __ Daddu(TMP, obj, TMP);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
        __ Daddu(TMP, obj, TMP);
        __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      // References are 32-bit heap values and must be zero-extended.
      LoadOperandType load_type = (type == Primitive::kPrimNot) ? kLoadUnsignedWord : kLoadWord;
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(load_type, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
        __ Daddu(TMP, obj, TMP);
        __ LoadFromOffset(load_type, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
        __ Daddu(TMP, obj, TMP);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFpuFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
        __ Daddu(TMP, obj, TMP);
        __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
        __ Daddu(TMP, obj, TMP);
        __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  // The load may fault on a null array reference; note it as the implicit
  // null check for this instruction if one is pending.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
1400
1401void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
1402 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1403 locations->SetInAt(0, Location::RequiresRegister());
1404 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1405}
1406
1407void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
1408 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01001409 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001410 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
1411 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1412 __ LoadFromOffset(kLoadWord, out, obj, offset);
1413 codegen_->MaybeRecordImplicitNullCheck(instruction);
1414}
1415
1416void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
David Brazdilbb3d5052015-09-21 18:39:16 +01001417 bool needs_runtime_call = instruction->NeedsTypeCheck();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001418 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1419 instruction,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01001420 needs_runtime_call ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);
David Brazdilbb3d5052015-09-21 18:39:16 +01001421 if (needs_runtime_call) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001422 InvokeRuntimeCallingConvention calling_convention;
1423 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1424 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1425 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1426 } else {
1427 locations->SetInAt(0, Location::RequiresRegister());
1428 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1429 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
1430 locations->SetInAt(2, Location::RequiresFpuRegister());
1431 } else {
1432 locations->SetInAt(2, Location::RequiresRegister());
1433 }
1434 }
1435}
1436
// Emits the store of a value into an array element. For each component type,
// the element address is either folded into an immediate offset (constant
// index) or computed into TMP as obj + (index << log2(element size)).
// Reference stores either go through the kQuickAputObject runtime entrypoint
// (when a type check is needed) or mark the GC card after the store.
void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        // Byte elements: no index scaling needed.
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ StoreToOffset(kStoreByte, value, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        // TMP = obj + index * 2.
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
        __ Daddu(TMP, obj, TMP);
        __ StoreToOffset(kStoreHalfword, value, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          // TMP = obj + index * 4.
          __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
          __ Daddu(TMP, obj, TMP);
          __ StoreToOffset(kStoreWord, value, TMP, data_offset);
        }
        // Null check recorded here (not after the switch) because the runtime
        // branch below performs its own checks.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          // Reference store: mark the GC card for the written-to object.
          codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());
        }
      } else {
        // Reference store needing a type check: defer entirely to the runtime.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(kQuickAputObject, instruction, instruction->GetDexPc());
        CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreToOffset(kStoreDoubleword, value, obj, offset);
      } else {
        // TMP = obj + index * 8.
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
        __ Daddu(TMP, obj, TMP);
        __ StoreToOffset(kStoreDoubleword, value, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      FpuRegister value = locations->InAt(2).AsFpuRegister<FpuRegister>();
      DCHECK(locations->InAt(2).IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreFpuToOffset(kStoreWord, value, obj, offset);
      } else {
        // TMP = obj + index * 4.
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
        __ Daddu(TMP, obj, TMP);
        __ StoreFpuToOffset(kStoreWord, value, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      FpuRegister value = locations->InAt(2).AsFpuRegister<FpuRegister>();
      DCHECK(locations->InAt(2).IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreFpuToOffset(kStoreDoubleword, value, obj, offset);
      } else {
        // TMP = obj + index * 8.
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
        __ Daddu(TMP, obj, TMP);
        __ StoreFpuToOffset(kStoreDoubleword, value, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
1563
1564void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001565 RegisterSet caller_saves = RegisterSet::Empty();
1566 InvokeRuntimeCallingConvention calling_convention;
1567 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1568 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1569 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001570 locations->SetInAt(0, Location::RequiresRegister());
1571 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001572}
1573
// Emits the array bounds check: a single unsigned compare-and-branch to the
// slow path that throws ArrayIndexOutOfBoundsException.
void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  BoundsCheckSlowPathMIPS64* slow_path =
      new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS64(instruction);
  codegen_->AddSlowPath(slow_path);

  GpuRegister index = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister length = locations->InAt(1).AsRegister<GpuRegister>();

  // length is limited by the maximum positive signed 32-bit integer.
  // Unsigned comparison of length and index checks for index < 0
  // and for length <= index simultaneously.
  __ Bgeuc(index, length, slow_path->GetEntryLabel());
}
1588
1589void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
1590 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1591 instruction,
1592 LocationSummary::kCallOnSlowPath);
1593 locations->SetInAt(0, Location::RequiresRegister());
1594 locations->SetInAt(1, Location::RequiresRegister());
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01001595 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07001596 locations->AddTemp(Location::RequiresRegister());
1597}
1598
// Emits the checked cast fast path: a null object passes immediately; an
// object whose class pointer equals `cls` passes; any other class falls into
// the type-check slow path.
void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister obj_cls = locations->GetTemp(0).AsRegister<GpuRegister>();

  SlowPathCodeMIPS64* slow_path =
      new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction);
  codegen_->AddSlowPath(slow_path);

  // TODO: avoid this check if we know obj is not null.
  __ Beqzc(obj, slow_path->GetExitLabel());
  // Compare the class of `obj` with `cls`.
  // Class pointers are 32-bit, hence the unsigned-word load.
  __ LoadFromOffset(kLoadUnsignedWord, obj_cls, obj, mirror::Object::ClassOffset().Int32Value());
  __ Bnec(obj_cls, cls, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
1616
1617void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
1618 LocationSummary* locations =
1619 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1620 locations->SetInAt(0, Location::RequiresRegister());
1621 if (check->HasUses()) {
1622 locations->SetOut(Location::SameAsFirstInput());
1623 }
1624}
1625
// Emits the class-initialization check: sets up the slow path that runs the
// class initializer (the `true` argument marks it as a clinit check), then
// delegates the actual status test to GenerateClassInitializationCheck.
void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
      check->GetLoadClass(),
      check,
      check->GetDexPc(),
      true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
}
1637
1638void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
1639 Primitive::Type in_type = compare->InputAt(0)->GetType();
1640
Alexey Frunze299a9392015-12-08 16:08:02 -08001641 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001642
1643 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001644 case Primitive::kPrimBoolean:
1645 case Primitive::kPrimByte:
1646 case Primitive::kPrimShort:
1647 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001648 case Primitive::kPrimInt:
Alexey Frunze4dda3372015-06-01 18:31:49 -07001649 case Primitive::kPrimLong:
1650 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001651 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001652 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1653 break;
1654
1655 case Primitive::kPrimFloat:
Alexey Frunze299a9392015-12-08 16:08:02 -08001656 case Primitive::kPrimDouble:
1657 locations->SetInAt(0, Location::RequiresFpuRegister());
1658 locations->SetInAt(1, Location::RequiresFpuRegister());
1659 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001660 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001661
1662 default:
1663 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1664 }
1665}
1666
// Emits code materializing the three-way comparison of HCompare into a core
// register. For integral types the result is computed branch-free as
// (rhs < lhs) - (lhs < rhs); for floating point it is built with compares and
// compact branches, honoring the instruction's NaN bias.
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  Primitive::Type in_type = instruction->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      GpuRegister rhs = ZERO;  // A constant rhs of 0 uses the ZERO register directly.
      if (use_imm) {
        if (in_type == Primitive::kPrimLong) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // res = (rhs < lhs) - (lhs < rhs), i.e. -1, 0 or +1, with no branches.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case Primitive::kPrimFloat: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Equal: res = 0.
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // gt bias: if neither equal nor lhs < rhs fires (includes NaN
        // operands), res falls through to +1.
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // lt bias: if neither equal nor rhs < lhs fires (includes NaN
        // operands), res falls through to -1.
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case Primitive::kPrimDouble: {
      // Same structure as the float case, using the double-precision compares.
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
1757
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001758void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001759 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunze299a9392015-12-08 16:08:02 -08001760 switch (instruction->InputAt(0)->GetType()) {
1761 default:
1762 case Primitive::kPrimLong:
1763 locations->SetInAt(0, Location::RequiresRegister());
1764 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1765 break;
1766
1767 case Primitive::kPrimFloat:
1768 case Primitive::kPrimDouble:
1769 locations->SetInAt(0, Location::RequiresFpuRegister());
1770 locations->SetInAt(1, Location::RequiresFpuRegister());
1771 break;
1772 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001773 if (!instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001774 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1775 }
1776}
1777
// Materializes an HCondition into its output register, unless the condition
// is consumed directly by its user (emitted at use site). Integral conditions
// are computed branch-free by GenerateIntLongCompare; floating-point
// conditions are emitted as a compare-and-branch whose outcome is then
// converted to 0/1 below.
void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  Primitive::Type type = instruction->InputAt(0)->GetType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  Mips64Label true_label;

  switch (type) {
    default:
      // Integer case.
      GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
      return;
    case Primitive::kPrimLong:
      GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
      return;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // TODO: don't use branches.
      GenerateFpCompareAndBranch(instruction->GetCondition(),
                                 instruction->IsGtBias(),
                                 type,
                                 locations,
                                 &true_label);
      break;
  }

  // Convert the branches into the result.
  Mips64Label done;

  // False case: result = 0.
  __ LoadConst32(dst, 0);
  __ Bc(&done);

  // True case: result = 1.
  __ Bind(&true_label);
  __ LoadConst32(dst, 1);
  __ Bind(&done);
}
1820
// Emits division or remainder by the constant +1 or -1:
//   rem: always 0;
//   div by +1: copy (or nothing if out == dividend);
//   div by -1: negation (32- or 64-bit subtract from zero).
void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  DCHECK(imm == 1 || imm == -1);

  if (instruction->IsRem()) {
    // x % +/-1 == 0 for all x.
    __ Move(out, ZERO);
  } else {
    if (imm == -1) {
      if (type == Primitive::kPrimInt) {
        __ Subu(out, ZERO, dividend);
      } else {
        DCHECK_EQ(type, Primitive::kPrimLong);
        __ Dsubu(out, ZERO, dividend);
      }
    } else if (out != dividend) {
      __ Move(out, dividend);
    }
  }
}
1849
// Emits division or remainder by a constant whose absolute value is a power
// of two (2^ctz_imm). The sequences round toward zero for negative dividends
// by first adding a bias derived from the sign bits (TMP), then shifting
// (div) or masking (rem). 64-bit variants must split shifts at the 32-bit
// boundary because MIPS64 shift instructions encode amounts 0-31 only
// (the *32 forms add 32).
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (type == Primitive::kPrimInt) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);
      } else {
        // TMP = (dividend < 0) ? abs_imm - 1 : 0 (the rounding bias).
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      if (imm < 0) {
        // Negative divisor: negate the quotient.
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimLong);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    if (type == Primitive::kPrimInt) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        // out = ((dividend + bias) & (abs_imm - 1)) - bias.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          // Mask doesn't fit Andi's 16-bit immediate: mask via shift pair.
          __ Sll(out, out, 32 - ctz_imm);
          __ Srl(out, out, 32 - ctz_imm);
        }
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimLong);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          if (ctz_imm > 32) {
            __ Dsll(out, out, 64 - ctz_imm);
            __ Dsrl(out, out, 64 - ctz_imm);
          } else {
            __ Dsll32(out, out, 32 - ctz_imm);
            __ Dsrl32(out, out, 32 - ctz_imm);
          }
        }
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
1953
// Emits division or remainder by an arbitrary non-trivial constant using the
// multiply-high "magic number" technique (see CalculateMagicAndShiftForDivRem).
// The quotient is TMP = hi(dividend * magic), corrected by +/-dividend when
// the signs of imm and magic differ, arithmetically shifted, and then fixed
// up with the sign bit. For rem, the quotient is multiplied back and
// subtracted from the dividend.
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == Primitive::kPrimLong),
                                  &magic,
                                  &shift);

  if (type == Primitive::kPrimInt) {
    // TMP = hi32(dividend * magic).
    __ LoadConst32(TMP, magic);
    __ MuhR6(TMP, dividend, TMP);

    // Correction when imm and magic have opposite signs.
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // Add the sign bit to round toward zero.
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // rem = dividend - quotient * imm.
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    // 64-bit version of the sequence above.
    __ LoadConst64(TMP, magic);
    __ Dmuh(TMP, dividend, TMP);

    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
2027
// Dispatches integral div/rem codegen: constant divisors go to the
// specialized sequences (+/-1, power of two, magic-number); register divisors
// use the hardware R6 div/mod instructions.
void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;

  LocationSummary* locations = instruction->GetLocations();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());
    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
    GpuRegister divisor = second.AsRegister<GpuRegister>();
    if (instruction->IsDiv()) {
      if (type == Primitive::kPrimInt)
        __ DivR6(out, dividend, divisor);
      else
        __ Ddiv(out, dividend, divisor);
    } else {
      if (type == Primitive::kPrimInt)
        __ ModR6(out, dividend, divisor);
      else
        __ Dmod(out, dividend, divisor);
    }
  }
}
2065
Alexey Frunze4dda3372015-06-01 18:31:49 -07002066void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
2067 LocationSummary* locations =
2068 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2069 switch (div->GetResultType()) {
2070 case Primitive::kPrimInt:
2071 case Primitive::kPrimLong:
2072 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07002073 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002074 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2075 break;
2076
2077 case Primitive::kPrimFloat:
2078 case Primitive::kPrimDouble:
2079 locations->SetInAt(0, Location::RequiresFpuRegister());
2080 locations->SetInAt(1, Location::RequiresFpuRegister());
2081 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2082 break;
2083
2084 default:
2085 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2086 }
2087}
2088
// Emits division: integral types share GenerateDivRemIntegral with HRem;
// floating-point types use the single-instruction FPU divide.
void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(instruction);
      break;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (type == Primitive::kPrimFloat)
        __ DivS(dst, lhs, rhs);
      else
        __ DivD(dst, lhs, rhs);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}
2113
2114void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002115 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002116 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002117}
2118
// Emits the divide-by-zero check. A constant divisor is resolved at compile
// time (unconditional branch to the throw path if zero, nothing otherwise);
// a register divisor gets a branch-if-zero to the slow path.
void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeMIPS64* slow_path =
      new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  Primitive::Type type = instruction->GetType();

  if (!Primitive::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
    if (divisor == 0) {
      // Division by the constant zero: always throw.
      __ Bc(slow_path->GetEntryLabel());
    } else {
      // A division by a non-null constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
  }
}
2144
2145void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
2146 LocationSummary* locations =
2147 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2148 locations->SetOut(Location::ConstantLocation(constant));
2149}
2150
// No code is emitted for the constant itself; users materialize the value
// from its ConstantLocation.
void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2154
// HExit needs no registers.
void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
2158
// Intentionally empty: nothing is executed at the exit block itself.
void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
2161
2162void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
2163 LocationSummary* locations =
2164 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2165 locations->SetOut(Location::ConstantLocation(constant));
2166}
2167
// No code emitted here; the constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2171
// Emits control flow to `successor` for a goto-like instruction (`got`).
// Loop back edges run their suspend check first; a branch is only emitted
// when the successor is not the textually-next block.
void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Back edge with a suspend check: the suspend check also performs the
    // branch back to the loop header, so we are done after emitting it.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  // A suspend check placed just before the goto in the entry block is emitted
  // here, with no successor of its own.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ Bc(codegen_->GetLabelOf(successor));
  }
}
2190
// A goto has no operands.
void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
2194
// Delegates to HandleGoto with the goto's sole successor.
void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
2198
// A try boundary has no operands.
void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
2202
// Branches to the normal-flow successor of the try boundary, unless that
// successor is the exit block (in which case nothing is emitted).
void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}
2209
// Materializes the boolean result (0 or 1) of an integer comparison into the
// output register. Conditions without a direct MIPS64 set instruction
// (>=, <=, > and their unsigned variants) are synthesized by computing a
// related "set on less than" and flipping the low bit with Xori.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
                                                            bool is64bit,
                                                            LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  // Unsigned addition avoids signed-overflow UB when rhs_imm == INT64_MAX.
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsUint<16>(rhs_imm)) {
        // XOR leaves zero iff lhs == rhs; the compare-with-1 below converts
        // that to a boolean.
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      if (cond == kCondEQ) {
        __ Sltiu(dst, dst, 1);
      } else {
        __ Sltu(dst, ZERO, dst);
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
2344
// Emits a compact branch to `label` taken when the integer comparison `cond`
// holds. Comparisons against a constant zero use the single-register
// compare-with-zero branches; other constants are first loaded into TMP.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
                                                                     bool is64bit,
                                                                     LocationSummary* locations,
                                                                     Mips64Label* label) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }

  if (use_imm && rhs_imm == 0) {
    // Unsigned comparisons against zero degenerate: B (< 0) is always false,
    // AE (>= 0) is always true, BE (<= 0) means == 0, A (> 0) means != 0.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqzc(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnezc(lhs, label);
        break;
      case kCondLT:
        __ Bltzc(lhs, label);
        break;
      case kCondGE:
        __ Bgezc(lhs, label);
        break;
      case kCondLE:
        __ Blezc(lhs, label);
        break;
      case kCondGT:
        __ Bgtzc(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ Bc(label);
        break;
    }
  } else {
    if (use_imm) {
      rhs_reg = TMP;
      __ LoadConst64(rhs_reg, rhs_imm);
    }
    // LE/GT (and BE/A) are obtained by swapping the operands of GE/LT.
    switch (cond) {
      case kCondEQ:
        __ Beqc(lhs, rhs_reg, label);
        break;
      case kCondNE:
        __ Bnec(lhs, rhs_reg, label);
        break;
      case kCondLT:
        __ Bltc(lhs, rhs_reg, label);
        break;
      case kCondGE:
        __ Bgec(lhs, rhs_reg, label);
        break;
      case kCondLE:
        __ Bgec(rhs_reg, lhs, label);
        break;
      case kCondGT:
        __ Bltc(rhs_reg, lhs, label);
        break;
      case kCondB:
        __ Bltuc(lhs, rhs_reg, label);
        break;
      case kCondAE:
        __ Bgeuc(lhs, rhs_reg, label);
        break;
      case kCondBE:
        __ Bgeuc(rhs_reg, lhs, label);
        break;
      case kCondA:
        __ Bltuc(rhs_reg, lhs, label);
        break;
    }
  }
}
2431
// Emits a branch to `label` taken when the floating-point comparison `cond`
// holds. `gt_bias` selects between ordered (CmpLt/CmpLe, false on NaN) and
// unordered (CmpUlt/CmpUle, true on NaN) compares so that NaN operands fall
// on the side required by the HCompare bias. The compare writes a mask into
// FTMP, tested with Bc1nez/Bc1eqz.
void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
                                                                bool gt_bias,
                                                                Primitive::Type type,
                                                                LocationSummary* locations,
                                                                Mips64Label* label) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        // NE is the negation of EQ: branch when the EQ compare is false.
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        // GT/GE are expressed by swapping operands of the LT/LE compares.
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
    }
  }
}
2532
// Emits branches for `instruction`'s condition input (at
// `condition_input_index`). Either target may be null, meaning that successor
// is the fall-through block and needs no branch.
void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           Mips64Label* true_target,
                                                           Mips64Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ Bc(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ Bc(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
    } else {
      __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    Primitive::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    Mips64Label* branch_target = true_target;

    if (true_target == nullptr) {
      // Branch to false_target on the opposite condition instead (pattern 1).
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
        break;
      case Primitive::kPrimLong:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ Bc(false_target);
  }
}
2608
void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  // Only a materialized condition (or plain boolean) needs a register; a
  // non-materialized HCondition is consumed directly by the branch emitter.
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
2615
2616void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00002617 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2618 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002619 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00002620 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002621 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00002622 nullptr : codegen_->GetLabelOf(false_successor);
2623 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002624}
2625
void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  // As in VisitIf: only a materialized condition needs a register.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
2634
// Branches to the deoptimization slow path when the condition holds;
// otherwise falls through (false_target == nullptr).
void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeMIPS64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}
2643
Goran Jakovljevicc6418422016-12-05 16:31:55 +01002644void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
2645 LocationSummary* locations = new (GetGraph()->GetArena())
2646 LocationSummary(flag, LocationSummary::kNoCall);
2647 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07002648}
2649
// Loads the 32-bit should-deoptimize flag from its slot in the current frame.
void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  __ LoadFromOffset(kLoadWord,
                    flag->GetLocations()->Out().AsRegister<GpuRegister>(),
                    SP,
                    codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
}
2656
David Brazdil74eb1b22015-12-14 11:44:01 +00002657void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
2658 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
2659 if (Primitive::IsFloatingPointType(select->GetType())) {
2660 locations->SetInAt(0, Location::RequiresFpuRegister());
2661 locations->SetInAt(1, Location::RequiresFpuRegister());
2662 } else {
2663 locations->SetInAt(0, Location::RequiresRegister());
2664 locations->SetInAt(1, Location::RequiresRegister());
2665 }
2666 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
2667 locations->SetInAt(2, Location::RequiresRegister());
2668 }
2669 locations->SetOut(Location::SameAsFirstInput());
2670}
2671
// Implements select as a conditional skip: the output starts as the false
// value (it shares input 0's register); if the condition is true we fall
// through and overwrite it with the true value, otherwise we branch past the
// move.
void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Mips64Label false_target;
  GenerateTestAndBranch(select,
                        /* condition_input_index */ 2,
                        /* true_target */ nullptr,
                        &false_target);
  codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
  __ Bind(&false_target);
}
2682
void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No operands; the LocationSummary presumably attaches itself to the
  // instruction in its constructor — confirm against LocationSummary's ctor.
  new (GetGraph()->GetArena()) LocationSummary(info);
}
2686
void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
2690
// Emits a single nop (used e.g. as an anchor for native debug info).
void CodeGeneratorMIPS64::GenerateNop() {
  __ Nop();
}
2694
Alexey Frunze4dda3372015-06-01 18:31:49 -07002695void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
2696 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
2697 LocationSummary* locations =
2698 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2699 locations->SetInAt(0, Location::RequiresRegister());
2700 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2701 locations->SetOut(Location::RequiresFpuRegister());
2702 } else {
2703 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2704 }
2705}
2706
// Emits the load for an instance/static field read, choosing the load width
// and sign/zero extension from the field's primitive type.
void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  LoadOperandType load_type = kLoadUnsignedByte;
  switch (type) {
    case Primitive::kPrimBoolean:
      load_type = kLoadUnsignedByte;
      break;
    case Primitive::kPrimByte:
      load_type = kLoadSignedByte;
      break;
    case Primitive::kPrimShort:
      load_type = kLoadSignedHalfword;
      break;
    case Primitive::kPrimChar:
      load_type = kLoadUnsignedHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      load_type = kLoadWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      load_type = kLoadDoubleword;
      break;
    case Primitive::kPrimNot:
      // References are loaded as 32-bit unsigned words.
      load_type = kLoadUnsignedWord;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!Primitive::IsFloatingPointType(type)) {
    DCHECK(locations->Out().IsRegister());
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    __ LoadFromOffset(load_type, dst, obj, field_info.GetFieldOffset().Uint32Value());
  } else {
    DCHECK(locations->Out().IsFpuRegister());
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, field_info.GetFieldOffset().Uint32Value());
  }

  // A fault on the load above doubles as the null check for `obj`.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // TODO: memory barrier?
}
2754
2755void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
2756 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
2757 LocationSummary* locations =
2758 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2759 locations->SetInAt(0, Location::RequiresRegister());
2760 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
2761 locations->SetInAt(1, Location::RequiresFpuRegister());
2762 } else {
2763 locations->SetInAt(1, Location::RequiresRegister());
2764 }
2765}
2766
// Emits the store for an instance/static field write, choosing the store
// width from the field's primitive type, and marks the GC card when a
// reference is written.
void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  StoreOperandType store_type = kStoreByte;
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      store_type = kStoreByte;
      break;
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
      store_type = kStoreHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      store_type = kStoreWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      store_type = kStoreDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!Primitive::IsFloatingPointType(type)) {
    DCHECK(locations->InAt(1).IsRegister());
    GpuRegister src = locations->InAt(1).AsRegister<GpuRegister>();
    __ StoreToOffset(store_type, src, obj, field_info.GetFieldOffset().Uint32Value());
  } else {
    DCHECK(locations->InAt(1).IsFpuRegister());
    FpuRegister src = locations->InAt(1).AsFpuRegister<FpuRegister>();
    __ StoreFpuToOffset(store_type, src, obj, field_info.GetFieldOffset().Uint32Value());
  }

  // A fault on the store above doubles as the null check for `obj`.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // TODO: memory barriers?
  if (CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1))) {
    // Reference stored into a heap object: dirty the card for the GC.
    DCHECK(locations->InAt(1).IsRegister());
    GpuRegister src = locations->InAt(1).AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }
}
2814
// Delegates to the shared field-get location setup.
void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2818
// Delegates to the shared field-get code emission.
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
2822
// Delegates to the shared field-set location setup.
void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
2826
// Delegates to the shared field-set code emission, forwarding nullability of
// the stored value for the write-barrier decision.
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
2830
void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  // Only a non-exact check may need the type-check slow path.
  LocationSummary::CallKind call_kind =
      instruction->IsExactCheck() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // The output does overlap inputs.
  // Note that TypeCheckSlowPathMIPS64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}
2841
// Emits instanceof: 0 for null, class-pointer equality for the exact check,
// and a slow-path call for the general case when the classes differ.
void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  Mips64Label done;

  // Return 0 if `obj` is null.
  // TODO: Avoid this check if we know `obj` is not null.
  __ Move(out, ZERO);
  __ Beqzc(obj, &done);

  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadUnsignedWord, out, obj, mirror::Object::ClassOffset().Int32Value());
  if (instruction->IsExactCheck()) {
    // Classes must be equal for the instanceof to succeed.
    __ Xor(out, out, cls);
    __ Sltiu(out, out, 1);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    SlowPathCodeMIPS64* slow_path =
        new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    __ Bnec(out, cls, slow_path->GetEntryLabel());
    __ LoadConst32(out, 1);
    __ Bind(slow_path->GetExitLabel());
  }

  __ Bind(&done);
}
2874
2875void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
2876 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2877 locations->SetOut(Location::ConstantLocation(constant));
2878}
2879
// No code emitted here; the constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2883
2884void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
2885 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
2886 locations->SetOut(Location::ConstantLocation(constant));
2887}
2888
// No code emitted here; the constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
2892
void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
2899
// Calls the shared runtime trampoline for invokes that could not be resolved.
void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
2903
// Common invoke location setup: maps arguments per the MIPS64 dex calling
// convention via the shared visitor.
void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
2908
void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // The register T0 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
}
2915
// Emits an interface call: load the receiver's class, walk to its IMT, index
// the IMT entry, and jump through the method's quick entry point via T9.
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // The class load above doubles as the receiver null check.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImtPtr() (64-bit pointer into the class's IMT).
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  // Presumably fills the jalr delay slot — TODO confirm for MIPS64R6.
  __ Nop();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
2949
2950void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07002951 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
2952 if (intrinsic.TryDispatch(invoke)) {
2953 return;
2954 }
2955
Alexey Frunze4dda3372015-06-01 18:31:49 -07002956 HandleInvoke(invoke);
2957}
2958
2959void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00002960 // Explicit clinit checks triggered by static invokes must have been pruned by
2961 // art::PrepareForRegisterAllocation.
2962 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002963
Chris Larsen3039e382015-08-26 07:54:08 -07002964 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
2965 if (intrinsic.TryDispatch(invoke)) {
2966 return;
2967 }
2968
Alexey Frunze4dda3372015-06-01 18:31:49 -07002969 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002970}
2971
Chris Larsen3039e382015-08-26 07:54:08 -07002972static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002973 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07002974 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
2975 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002976 return true;
2977 }
2978 return false;
2979}
2980
// MIPS64 only implements the dex-cache-via-method string load; any desired
// kind is downgraded to that.
HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind ATTRIBUTE_UNUSED) {
  // TODO: Implement other kinds.
  return HLoadString::LoadKind::kDexCacheViaMethod;
}
2986
// MIPS64 only implements the dex-cache-via-method class load; any desired
// kind (other than kReferrersClass, which must not reach here) is downgraded.
HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  DCHECK_NE(desired_class_load_kind, HLoadClass::LoadKind::kReferrersClass);
  // TODO: Implement other kinds.
  return HLoadClass::LoadKind::kDexCacheViaMethod;
}
2993
Vladimir Markodc151b22015-10-15 18:02:30 +01002994HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
2995 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01002996 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Vladimir Markodc151b22015-10-15 18:02:30 +01002997 switch (desired_dispatch_info.method_load_kind) {
2998 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
2999 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
3000 // TODO: Implement these types. For the moment, we fall back to kDexCacheViaMethod.
3001 return HInvokeStaticOrDirect::DispatchInfo {
3002 HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod,
3003 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
3004 0u,
3005 0u
3006 };
3007 default:
3008 break;
3009 }
3010 switch (desired_dispatch_info.code_ptr_location) {
3011 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3012 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative:
3013 // TODO: Implement these types. For the moment, we fall back to kCallArtMethod.
3014 return HInvokeStaticOrDirect::DispatchInfo {
3015 desired_dispatch_info.method_load_kind,
3016 HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
3017 desired_dispatch_info.method_load_data,
3018 0u
3019 };
3020 default:
3021 return desired_dispatch_info;
3022 }
3023}
3024
// Emits a static or direct call: first materializes the callee (ArtMethod* or
// raw code address) per the method-load kind, then performs the call per the
// code-pointer location. `temp` is a scratch location reserved for this
// invoke; for all load kinds except kRecursive the callee ends up in it.
void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // All registers are assumed to be correctly set up per the calling convention.

  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  switch (invoke->GetMethodLoadKind()) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint (loaded from the Thread register TR).
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Self-call: the current method is already available as an input; no load needed.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // The ArtMethod* is a known constant; materialize it as a 64-bit immediate.
      __ LoadConst64(temp.AsRegister<GpuRegister>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
      // TODO: Implement these types.
      // Currently filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      // Resolve the callee through the current method's dex cache.
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      GpuRegister reg = temp.AsRegister<GpuRegister>();
      GpuRegister method_reg;
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<GpuRegister>();
      } else {
        // TODO: use the appropriate DCHECK() here if possible.
        // DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        // Current method was not allocated a register; reload it from its
        // canonical stack slot.
        __ Ld(reg, SP, kCurrentMethodStackOffset);
      }

      // temp = temp->dex_cache_resolved_methods_;
      __ LoadFromOffset(kLoadDoubleword,
                        reg,
                        method_reg,
                        ArtMethod::DexCacheResolvedMethodsOffset(kMips64PointerSize).Int32Value());
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ LoadFromOffset(kLoadDoubleword,
                        reg,
                        reg,
                        CodeGenerator::GetCachePointerOffset(index_in_cache));
      break;
    }
  }

  switch (invoke->GetCodePtrLocation()) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Compact call to our own frame entry; Jialc has no delay slot, so no Nop.
      __ Jialc(&frame_entry_label_, T9);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
      // LR = invoke->GetDirectCodePtr();
      __ LoadConst64(T9, invoke->GetDirectCodePtr());
      // LR()
      __ Jalr(T9);
      __ Nop();  // Fill the Jalr delay slot.
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
    case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative:
      // TODO: Implement these types.
      // Currently filtered out by GetSupportedInvokeStaticOrDirectDispatch().
      LOG(FATAL) << "Unsupported";
      UNREACHABLE();
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9()
      __ Jalr(T9);
      __ Nop();  // Fill the Jalr delay slot.
      break;
  }
  DCHECK(!IsLeafMethod());
}
3113
3114void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003115 // Explicit clinit checks triggered by static invokes must have been pruned by
3116 // art::PrepareForRegisterAllocation.
3117 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003118
3119 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3120 return;
3121 }
3122
3123 LocationSummary* locations = invoke->GetLocations();
3124 codegen_->GenerateStaticOrDirectCall(invoke,
3125 locations->HasTemps()
3126 ? locations->GetTemp(0)
3127 : Location::NoLocation());
3128 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3129}
3130
// Emits a virtual call: loads the receiver's class, indexes the embedded
// vtable, and jumps to the target's quick-compiled entry point via T9.
void CodeGeneratorMIPS64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_location) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  GpuRegister receiver = calling_convention.GetRegisterAt(0);

  GpuRegister temp = temp_location.AsRegister<GpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // temp = object->GetClass();
  // Compressed (32-bit) reference load, hence kLoadUnsignedWord.
  __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
  // Recorded immediately after the receiver dereference so that a fault on a
  // null receiver is attributed to this invoke.
  MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();  // Fill the Jalr delay slot.
}
3156
3157void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
3158 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3159 return;
3160 }
3161
3162 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003163 DCHECK(!codegen_->IsLeafMethod());
3164 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3165}
3166
3167void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
Calin Juravle98893e12015-10-02 21:05:03 +01003168 InvokeRuntimeCallingConvention calling_convention;
3169 CodeGenerator::CreateLoadClassLocationSummary(
3170 cls,
3171 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Alexey Frunze00580bd2015-11-11 13:31:12 -08003172 calling_convention.GetReturnLocation(cls->GetType()));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003173}
3174
// Emits code for a class load. Three cases: (1) access check required —
// resolve entirely through the runtime; (2) the referrer's own class — load
// it from the current method; (3) otherwise — look it up in the dex cache,
// with a slow path for resolution and/or class initialization as needed.
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) {
  LocationSummary* locations = cls->GetLocations();
  if (cls->NeedsAccessCheck()) {
    // Pass the type index to the runtime and let it do the full check+resolve.
    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
    return;
  }

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister current_method = locations->InAt(0).AsRegister<GpuRegister>();
  if (cls->IsReferrersClass()) {
    // The declaring class is always resolved; no runtime call can be needed.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    // Compressed reference load of current_method->declaring_class_.
    __ LoadFromOffset(kLoadUnsignedWord, out, current_method,
                      ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    // out = current_method->dex_cache_resolved_types_;
    __ LoadFromOffset(kLoadDoubleword, out, current_method,
                      ArtMethod::DexCacheResolvedTypesOffset(kMips64PointerSize).Int32Value());
    // out = out[type_index]; compressed reference load from the cache array.
    __ LoadFromOffset(
        kLoadUnsignedWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_));
    // TODO: We will need a read barrier here.
    if (!cls->IsInDexCache() || cls->MustGenerateClinitCheck()) {
      DCHECK(cls->CanCallRuntime());
      SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
          cls,
          cls,
          cls->GetDexPc(),
          cls->MustGenerateClinitCheck());
      codegen_->AddSlowPath(slow_path);
      if (!cls->IsInDexCache()) {
        // Null cache entry means the class is unresolved: go to the slow path.
        __ Beqzc(out, slow_path->GetEntryLabel());
      }
      if (cls->MustGenerateClinitCheck()) {
        // The clinit check binds the slow path's exit label itself.
        GenerateClassInitializationCheck(slow_path, out);
      } else {
        __ Bind(slow_path->GetExitLabel());
      }
    }
  }
}
3216
// Offset of the pending-exception field in the Thread object (reached via TR).
static int32_t GetExceptionTlsOffset() {
  return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
}
3220
Alexey Frunze4dda3372015-06-01 18:31:49 -07003221void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
3222 LocationSummary* locations =
3223 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3224 locations->SetOut(Location::RequiresRegister());
3225}
3226
3227void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
3228 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
David Brazdilcb1c0552015-08-04 16:22:25 +01003229 __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
3230}
3231
// Clearing the exception needs no inputs, outputs, or temps.
void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
3235
// Clears the pending exception by storing zero (a 32-bit store, matching the
// compressed-reference width used by the corresponding load above).
void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
3239
Alexey Frunze4dda3372015-06-01 18:31:49 -07003240void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
Vladimir Markocac5a7e2016-02-22 10:39:50 +00003241 LocationSummary::CallKind call_kind = load->NeedsEnvironment()
3242 ? LocationSummary::kCallOnSlowPath
3243 : LocationSummary::kNoCall;
Nicolas Geoffray917d0162015-11-24 18:25:35 +00003244 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003245 locations->SetInAt(0, Location::RequiresRegister());
3246 locations->SetOut(Location::RequiresRegister());
3247}
3248
// Currently always resolves the string through the slow path (unconditional
// branch to it); the fast dex-cache lookup is intentionally disabled.
void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) {
  // TODO: Re-add the compiler code to do string dex cache lookup again.
  SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathMIPS64(load);
  codegen_->AddSlowPath(slow_path);
  __ Bc(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
3256
Alexey Frunze4dda3372015-06-01 18:31:49 -07003257void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
3258 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3259 locations->SetOut(Location::ConstantLocation(constant));
3260}
3261
// No code emitted here: the constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3265
3266void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
3267 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003268 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003269 InvokeRuntimeCallingConvention calling_convention;
3270 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3271}
3272
3273void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01003274 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07003275 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01003276 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003277 if (instruction->IsEnter()) {
3278 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
3279 } else {
3280 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
3281 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003282}
3283
3284void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
3285 LocationSummary* locations =
3286 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3287 switch (mul->GetResultType()) {
3288 case Primitive::kPrimInt:
3289 case Primitive::kPrimLong:
3290 locations->SetInAt(0, Location::RequiresRegister());
3291 locations->SetInAt(1, Location::RequiresRegister());
3292 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3293 break;
3294
3295 case Primitive::kPrimFloat:
3296 case Primitive::kPrimDouble:
3297 locations->SetInAt(0, Location::RequiresFpuRegister());
3298 locations->SetInAt(1, Location::RequiresFpuRegister());
3299 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3300 break;
3301
3302 default:
3303 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
3304 }
3305}
3306
3307void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
3308 Primitive::Type type = instruction->GetType();
3309 LocationSummary* locations = instruction->GetLocations();
3310
3311 switch (type) {
3312 case Primitive::kPrimInt:
3313 case Primitive::kPrimLong: {
3314 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3315 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
3316 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
3317 if (type == Primitive::kPrimInt)
3318 __ MulR6(dst, lhs, rhs);
3319 else
3320 __ Dmul(dst, lhs, rhs);
3321 break;
3322 }
3323 case Primitive::kPrimFloat:
3324 case Primitive::kPrimDouble: {
3325 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3326 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3327 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
3328 if (type == Primitive::kPrimFloat)
3329 __ MulS(dst, lhs, rhs);
3330 else
3331 __ MulD(dst, lhs, rhs);
3332 break;
3333 }
3334 default:
3335 LOG(FATAL) << "Unexpected mul type " << type;
3336 }
3337}
3338
3339void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
3340 LocationSummary* locations =
3341 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
3342 switch (neg->GetResultType()) {
3343 case Primitive::kPrimInt:
3344 case Primitive::kPrimLong:
3345 locations->SetInAt(0, Location::RequiresRegister());
3346 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3347 break;
3348
3349 case Primitive::kPrimFloat:
3350 case Primitive::kPrimDouble:
3351 locations->SetInAt(0, Location::RequiresFpuRegister());
3352 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3353 break;
3354
3355 default:
3356 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
3357 }
3358}
3359
3360void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
3361 Primitive::Type type = instruction->GetType();
3362 LocationSummary* locations = instruction->GetLocations();
3363
3364 switch (type) {
3365 case Primitive::kPrimInt:
3366 case Primitive::kPrimLong: {
3367 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3368 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
3369 if (type == Primitive::kPrimInt)
3370 __ Subu(dst, ZERO, src);
3371 else
3372 __ Dsubu(dst, ZERO, src);
3373 break;
3374 }
3375 case Primitive::kPrimFloat:
3376 case Primitive::kPrimDouble: {
3377 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3378 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
3379 if (type == Primitive::kPrimFloat)
3380 __ NegS(dst, src);
3381 else
3382 __ NegD(dst, src);
3383 break;
3384 }
3385 default:
3386 LOG(FATAL) << "Unexpected neg type " << type;
3387 }
3388}
3389
3390void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
3391 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003392 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003393 InvokeRuntimeCallingConvention calling_convention;
3394 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3395 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
3396 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3397 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3398}
3399
// Emits an array allocation: materializes the type index in the temp register
// and calls the instruction's allocation entrypoint.
void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  // Move an uint16_t value to a register.
  __ LoadConst32(locations->GetTemp(0).AsRegister<GpuRegister>(),
                 instruction->GetTypeIndex().index_);
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
}
3408
3409void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
3410 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003411 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003412 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00003413 if (instruction->IsStringAlloc()) {
3414 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3415 } else {
3416 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3417 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3418 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003419 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
3420}
3421
// Emits an object allocation. Strings are created via the StringFactory's
// NewEmptyString entrypoint (manual call through T9); other objects go
// through the generic runtime allocation entrypoint.
void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
    MemberOffset code_offset =
        ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
    // temp = thread->pNewEmptyString (an ArtMethod*).
    __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    // T9 = temp->entry_point_from_quick_compiled_code_.
    __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ Nop();  // Fill the Jalr delay slot.
    // Manual call: record the PC ourselves (InvokeRuntime does this for the else branch).
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
  }
}
3438
3439void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
3440 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3441 locations->SetInAt(0, Location::RequiresRegister());
3442 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3443}
3444
3445void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
3446 Primitive::Type type = instruction->GetType();
3447 LocationSummary* locations = instruction->GetLocations();
3448
3449 switch (type) {
3450 case Primitive::kPrimInt:
3451 case Primitive::kPrimLong: {
3452 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3453 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
3454 __ Nor(dst, src, ZERO);
3455 break;
3456 }
3457
3458 default:
3459 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
3460 }
3461}
3462
3463void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
3464 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3465 locations->SetInAt(0, Location::RequiresRegister());
3466 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3467}
3468
3469void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
3470 LocationSummary* locations = instruction->GetLocations();
3471 __ Xori(locations->Out().AsRegister<GpuRegister>(),
3472 locations->InAt(0).AsRegister<GpuRegister>(),
3473 1);
3474}
3475
// A null check takes the object to test in a register; the shared helper
// builds the throwing-slow-path location summary.
void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}
3480
// Implicit null check: a dummy word load from the object. Loading into ZERO
// discards the value; a null object faults and the fault handler uses the
// recorded PC to attribute the exception to this instruction. Skipped when a
// following user instruction can absorb the check.
void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
  RecordPcInfo(instruction, instruction->GetDexPc());
}
3490
// Explicit null check: compare the object against zero and branch to a
// NullPointerException-throwing slow path when it is null.
void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS64(instruction);
  AddSlowPath(slow_path);

  Location obj = instruction->GetLocations()->InAt(0);

  __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
}
3499
// Delegates to the code generator, which chooses implicit vs. explicit checks.
void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
3503
// Or uses the generic two-operand binary-op location setup.
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}
3507
// Or uses the generic binary-op code emission.
void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}
3511
// Parallel moves carry their operands directly; reaching the locations
// builder for one is a compiler bug.
void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
3515
// Lowers the parallel move through the shared move resolver.
void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3519
3520void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
3521 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3522 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3523 if (location.IsStackSlot()) {
3524 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3525 } else if (location.IsDoubleStackSlot()) {
3526 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3527 }
3528 locations->SetOut(location);
3529}
3530
// No code: the parameter is already in the location chosen above.
void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
3535
3536void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
3537 LocationSummary* locations =
3538 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3539 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3540}
3541
// No code: the method pointer is already in its fixed register.
void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
                                                        ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
3546
3547void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
3548 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01003549 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003550 locations->SetInAt(i, Location::Any());
3551 }
3552 locations->SetOut(Location::Any());
3553}
3554
// Phis are resolved into parallel moves before code generation; reaching this
// visitor is a compiler bug.
void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
3558
3559void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
3560 Primitive::Type type = rem->GetResultType();
3561 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003562 Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
3563 : LocationSummary::kNoCall;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003564 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
3565
3566 switch (type) {
3567 case Primitive::kPrimInt:
3568 case Primitive::kPrimLong:
3569 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07003570 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003571 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3572 break;
3573
3574 case Primitive::kPrimFloat:
3575 case Primitive::kPrimDouble: {
3576 InvokeRuntimeCallingConvention calling_convention;
3577 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
3578 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
3579 locations->SetOut(calling_convention.GetReturnLocation(type));
3580 break;
3581 }
3582
3583 default:
3584 LOG(FATAL) << "Unexpected rem type " << type;
3585 }
3586}
3587
// Emits code for HRem: inline div/mod for integral types, a call to the
// quick fmodf/fmod entrypoint for floating-point types.
void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      // Shared helper emits the divide and remainder extraction inline.
      GenerateDivRemIntegral(instruction);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // Float remainder uses fmodf, double uses fmod; verify the entrypoint
      // signature matches (compile-time check, no code emitted).
      QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
      codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
      if (type == Primitive::kPrimFloat) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
3612
// A memory barrier has no operands, so it needs no LocationSummary.
void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

// Emits the fence instruction(s) appropriate for the requested barrier kind.
void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
3620
// The returned value must be in the ABI return location for its type
// (see Mips64ReturnLocation).
void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  Primitive::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, Mips64ReturnLocation(return_type));
}

// The value is already in the return register; just tear down the frame.
void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

// void return: no operands, no LocationSummary needed.
void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
3638
// Rotate and shift instructions (ror/shl/shr) all share the HandleShift
// helper for both location building and code generation.
void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
3662
// Subtraction is handled by the generic two-operand helper.
void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}
3670
// Static field accesses reuse the shared field-access helpers; the set path
// also forwards whether the stored value can be null (used for the GC write
// barrier decision in HandleFieldSet).
void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
3686
// Unresolved field accesses (field could not be resolved at compile time)
// are all lowered the same way: the locations builder sets up a runtime-call
// location summary via the field-access calling convention, and code
// generation emits a call that resolves and performs the access at runtime.
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
3753}
3754
// Suspend checks only call the runtime on the slow path, and that path
// saves/restores everything itself, so no caller-save registers are marked.
void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
}
3760
// Emits a suspend check unless another construct (loop back edge or the
// entry block's goto) is responsible for emitting it instead, in which case
// nothing is generated here to avoid a redundant check.
void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
3774
// Throw always calls the runtime: the exception object goes in the first
// runtime-call argument register.
void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
3781
// Delegates to the quick DeliverException entrypoint; the entrypoint
// signature is verified at compile time.
void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
3786
3787void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
3788 Primitive::Type input_type = conversion->GetInputType();
3789 Primitive::Type result_type = conversion->GetResultType();
3790 DCHECK_NE(input_type, result_type);
3791
3792 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
3793 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
3794 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
3795 }
3796
Alexey Frunzebaf60b72015-12-22 15:15:03 -08003797 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion);
3798
3799 if (Primitive::IsFloatingPointType(input_type)) {
3800 locations->SetInAt(0, Location::RequiresFpuRegister());
3801 } else {
3802 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003803 }
3804
Alexey Frunzebaf60b72015-12-22 15:15:03 -08003805 if (Primitive::IsFloatingPointType(result_type)) {
3806 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003807 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08003808 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003809 }
3810}
3811
// Emits code for a primitive type conversion. Four shapes are handled:
// integral->integral (sign/zero extension), integral->FP (mtc1 + cvt),
// FP->integral (range-checked truncate, see the long NAN2008 note below),
// and FP->FP (cvt between single and double).
void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case Primitive::kPrimChar:
        // char is unsigned 16-bit: zero-extend with a mask.
        __ Andi(dst, src, 0xFFFF);
        break;
      case Primitive::kPrimByte:
        if (input_type == Primitive::kPrimLong) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          __ Seb(dst, src);
        }
        break;
      case Primitive::kPrimShort:
        if (input_type == Primitive::kPrimLong) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          __ Seh(dst, src);
        }
        break;
      case Primitive::kPrimInt:
      case Primitive::kPrimLong:
        // Sign-extend 32-bit int into bits 32 through 63 for
        // int-to-long and long-to-int conversions
        __ Sll(dst, src, 0);
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    // int/long -> float/double: move the integer into an FPU register, then
    // convert with the matching cvt instruction.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    if (input_type == Primitive::kPrimLong) {
      __ Dmtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      __ Mtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    Mips64Label truncate;
    Mips64Label done;

    // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
    // value when the input is either a NaN or is outside of the range of the output type
    // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
    // the same result.
    //
    // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
    // value of the output type if the input is outside of the range after the truncation or
    // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
    // results. This matches the desired float/double-to-int/long conversion exactly.
    //
    // So, NAN2008 affects handling of negative values and NaNs by the truncate instruction.
    //
    // The following code supports both NAN2008=0 and NAN2008=1 behaviors of the truncate
    // instruction, the reason being that the emulator implements NAN2008=0 on MIPS64R6,
    // even though it must be NAN2008=1 on R6.
    //
    // The code takes care of the different behaviors by first comparing the input to the
    // minimum output value (-2**-63 for truncating to long, -2**-31 for truncating to int).
    // If the input is greater than or equal to the minimum, it procedes to the truncate
    // instruction, which will handle such an input the same way irrespective of NAN2008.
    // Otherwise the input is compared to itself to determine whether it is a NaN or not
    // in order to return either zero or the minimum value.
    //
    // TODO: simplify this when the emulator correctly implements NAN2008=1 behavior of the
    // truncate instruction for MIPS64R6.
    if (input_type == Primitive::kPrimFloat) {
      uint32_t min_val = (result_type == Primitive::kPrimLong)
          ? bit_cast<uint32_t, float>(std::numeric_limits<int64_t>::min())
          : bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
      __ LoadConst32(TMP, min_val);
      __ Mtc1(TMP, FTMP);
      // FTMP = (min <= src), i.e. true when src is in the truncatable range.
      __ CmpLeS(FTMP, FTMP, src);
    } else {
      uint64_t min_val = (result_type == Primitive::kPrimLong)
          ? bit_cast<uint64_t, double>(std::numeric_limits<int64_t>::min())
          : bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
      __ LoadConst64(TMP, min_val);
      __ Dmtc1(TMP, FTMP);
      __ CmpLeD(FTMP, FTMP, src);
    }

    __ Bc1nez(FTMP, &truncate);

    // Slow case: src < min or NaN. Self-compare distinguishes the two:
    // FTMP is all-ones for a number (result: min value) and zero for a NaN
    // (result: 0), applied via the And below.
    if (input_type == Primitive::kPrimFloat) {
      __ CmpEqS(FTMP, src, src);
    } else {
      __ CmpEqD(FTMP, src, src);
    }
    if (result_type == Primitive::kPrimLong) {
      __ LoadConst64(dst, std::numeric_limits<int64_t>::min());
    } else {
      __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
    }
    __ Mfc1(TMP, FTMP);
    __ And(dst, dst, TMP);

    __ Bc(&done);

    __ Bind(&truncate);

    // In-range case: plain truncate to the target width, then move the
    // result back to the general-purpose destination register.
    if (result_type == Primitive::kPrimLong) {
      if (input_type == Primitive::kPrimFloat) {
        __ TruncLS(FTMP, src);
      } else {
        __ TruncLD(FTMP, src);
      }
      __ Dmfc1(dst, FTMP);
    } else {
      if (input_type == Primitive::kPrimFloat) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);
    }

    __ Bind(&done);
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    // float <-> double widening/narrowing.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    if (result_type == Primitive::kPrimFloat) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
3976
// Unsigned shift right shares the generic shift helper; xor shares the
// generic binary-op helper.
void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}
3992
// HBoundType carries type information only and must be stripped from the
// graph before register allocation; reaching either visitor is a bug.
void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
4002
// All comparison conditions — signed (eq/ne/lt/le/gt/ge) and unsigned
// (below/above variants) — are lowered uniformly through HandleCondition,
// for both location building and code generation.
void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
4082
Mark Mendellfe57faa2015-09-18 09:26:15 -04004083// Simple implementation of packed switch - generate cascaded compare/jumps.
// The switch value is the only operand; it must be in a register so the
// compare/branch chain below can test it.
void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}
4089
// Lowers a packed switch as a chain of compare-and-branch pairs: the value
// is rebased to zero (value - lower_bound) in TMP, then decremented while
// branching, two cases per iteration.
void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  int32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  // Rebase: temp = value - lower_bound. Use a single ADDIU when the negated
  // bound fits in a 16-bit immediate, otherwise materialize it in AT first.
  if (IsInt<16>(-lower_bound)) {
    __ Addiu(temp_reg, value_reg, -lower_bound);
  } else {
    __ LoadConst32(AT, -lower_bound);
    __ Addu(temp_reg, value_reg, AT);
  }
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_instr->GetBlock()->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  // Handle two case values per iteration by subtracting 2 and testing
  // "< 0" (first of the pair) then "== 0" (second of the pair).
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_instr->GetBlock(), default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
4132
// HClassTableGet (vtable/IMT slot read for certain devirtualizations) has
// no MIPS64 lowering yet; abort loudly if the graph contains one.
void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet*) {
  UNIMPLEMENTED(FATAL) << "ClassTableGet is unimplemented on mips64";
}

void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet*) {
  UNIMPLEMENTED(FATAL) << "ClassTableGet is unimplemented on mips64";
}
4140
Alexey Frunze4dda3372015-06-01 18:31:49 -07004141} // namespace mips64
4142} // namespace art