blob: a929d6a145629929dfe166e4273e918f77251547 [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunzec857c742015-09-23 15:12:39 -070019#include "art_method.h"
20#include "code_generator_utils.h"
Alexey Frunze19f6c692016-11-30 19:19:55 -080021#include "compiled_method.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070022#include "entrypoints/quick/quick_entrypoints.h"
23#include "entrypoints/quick/quick_entrypoints_enum.h"
24#include "gc/accounting/card_table.h"
25#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070026#include "intrinsics_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070027#include "mirror/array-inl.h"
28#include "mirror/class-inl.h"
29#include "offsets.h"
30#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070031#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070032#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070033#include "utils/stack_checks.h"
34
35namespace art {
36namespace mips64 {
37
38static constexpr int kCurrentMethodStackOffset = 0;
39static constexpr GpuRegister kMethodRegisterArgument = A0;
40
Alexey Frunze4dda3372015-06-01 18:31:49 -070041Location Mips64ReturnLocation(Primitive::Type return_type) {
42 switch (return_type) {
43 case Primitive::kPrimBoolean:
44 case Primitive::kPrimByte:
45 case Primitive::kPrimChar:
46 case Primitive::kPrimShort:
47 case Primitive::kPrimInt:
48 case Primitive::kPrimNot:
49 case Primitive::kPrimLong:
50 return Location::RegisterLocation(V0);
51
52 case Primitive::kPrimFloat:
53 case Primitive::kPrimDouble:
54 return Location::FpuRegisterLocation(F0);
55
56 case Primitive::kPrimVoid:
57 return Location();
58 }
59 UNREACHABLE();
60}
61
// Returns the location (V0 or F0) in which a dex-convention callee returns a
// value of `type`. Delegates to the shared helper above.
Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(Primitive::Type type) const {
  return Mips64ReturnLocation(type);
}
65
// Returns the register that carries the ArtMethod* of the callee (A0).
Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
69
// Assigns the location of the next method parameter of the given `type`:
// an FPU register for a floating-point type, a GPR otherwise, falling back
// to a stack slot once the corresponding register pool is exhausted.
// Note that the GPR and FPU register indices are advanced in lockstep
// (each branch bumps both counters), so a parameter consumes one register
// "slot" regardless of its kind — presumably to match the native MIPS64
// argument-passing convention; TODO confirm against the callers.
Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unexpected parameter type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = Location::FpuRegisterLocation(
        calling_convention.GetFpuRegisterAt(float_index_++));
    // Keep the GPR index in lockstep with the FPU index.
    gp_index_++;
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
    // Keep the FPU index in lockstep with the GPR index.
    float_index_++;
  } else {
    // Out of argument registers: the parameter goes on the stack, using a
    // double slot for 64-bit values.
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;

  // TODO: shouldn't we use a whole machine word per argument on the stack?
  // Implicit 4-byte method pointer (and such) will cause misalignment.

  return next_location;
}
99
// Runtime entrypoints return values in the same registers as dex calls.
Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
  return Mips64ReturnLocation(type);
}
103
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100104// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
105#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700106#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700107
// Slow path for HBoundsCheck: moves the offending index and the length into
// the runtime-call argument registers and throws ArrayIndexOutOfBoundsException
// (or StringIndexOutOfBoundsException for String.charAt). Never returns.
class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimInt,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimInt);
    // String.charAt range failures throw StringIndexOutOfBoundsException.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    // Both possible entrypoints share the (int32_t, int32_t) signature.
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  // The runtime call throws; control never falls through this slow path.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
};
144
// Slow path for HDivZeroCheck: throws ArithmeticException via the
// kQuickThrowDivZero runtime entrypoint. Never returns.
class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    // No arguments to marshal; the entrypoint takes none.
    mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  // The runtime call throws; control never falls through this slow path.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
};
163
// Slow path shared by HLoadClass and HClinitCheck: calls the runtime to
// resolve the type (and optionally run its class initializer), moves the
// resolved Class into the expected output register, and for the kBssEntry
// load kind also stores the result into the type's .bss slot so subsequent
// fast-path loads succeed.
class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `at` is the instruction this slow path belongs to (HLoadClass or
  // HClinitCheck); `cls` is the class being loaded; `do_clinit` selects
  // static-storage initialization over plain type resolution.
  LoadClassSlowPathMIPS64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCodeMIPS64(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the type index as the single runtime-call argument.
    InvokeRuntimeCallingConvention calling_convention;
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      mips64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
      DCHECK(out.IsValid());
      // TODO: Change art_quick_initialize_type/art_quick_initialize_static_storage to
      // kSaveEverything and use a temporary for the .bss entry address in the fast path,
      // so that we can avoid another calculation here.
      DCHECK_NE(out.AsRegister<GpuRegister>(), AT);
      // Emit a PC-relative address of the .bss entry into AT; the immediate
      // 0x5678 is a placeholder patched later via the recorded patch info.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info =
          mips64_codegen->NewPcRelativeTypePatch(cls_->GetDexFile(), type_index);
      mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info, AT);
      __ Sw(out.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
    }
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};
232
233class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
234 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000235 explicit LoadStringSlowPathMIPS64(HLoadString* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700236
237 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
238 LocationSummary* locations = instruction_->GetLocations();
239 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
240 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
241
242 __ Bind(GetEntryLabel());
243 SaveLiveRegisters(codegen, locations);
244
245 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzef63f5692016-12-13 17:43:11 -0800246 HLoadString* load = instruction_->AsLoadString();
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000247 const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
248 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
Serban Constantinescufc734082016-07-19 17:18:07 +0100249 mips64_codegen->InvokeRuntime(kQuickResolveString,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700250 instruction_,
251 instruction_->GetDexPc(),
252 this);
253 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
254 Primitive::Type type = instruction_->GetType();
255 mips64_codegen->MoveLocation(locations->Out(),
256 calling_convention.GetReturnLocation(type),
257 type);
258
259 RestoreLiveRegisters(codegen, locations);
Alexey Frunzef63f5692016-12-13 17:43:11 -0800260
261 // Store the resolved String to the BSS entry.
262 // TODO: Change art_quick_resolve_string to kSaveEverything and use a temporary for the
263 // .bss entry address in the fast path, so that we can avoid another calculation here.
264 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
265 DCHECK_NE(out, AT);
266 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
267 mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
268 mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info, AT);
269 __ Sw(out, AT, /* placeholder */ 0x5678);
270
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700271 __ Bc(GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700272 }
273
Roland Levillain46648892015-06-19 16:07:18 +0100274 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }
275
Alexey Frunze4dda3372015-06-01 18:31:49 -0700276 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700277 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
278};
279
// Slow path for HNullCheck: throws NullPointerException via the
// kQuickThrowNullPointer runtime entrypoint. Never returns.
class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  // The runtime call throws; control never falls through this slow path.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
};
305
// Slow path for HSuspendCheck: calls kQuickTestSuspend so the thread can
// honor a pending suspend request, then branches back either to the point
// after the check (`return_label_`) or to an explicit successor block.
class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      // Resume right after the suspend check.
      __ Bc(GetReturnLabel());
    } else {
      // Resume at the designated successor block.
      __ Bc(mips64_codegen->GetLabelOf(successor_));
    }
  }

  // Only meaningful when no explicit successor was supplied.
  Mips64Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Mips64Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
};
339
// Slow path shared by HInstanceOf and HCheckCast: marshals the object and
// class into argument registers and calls the appropriate runtime helper.
// For instanceof the (boolean) result is moved to the output location; for
// checkcast the entrypoint throws on failure.
class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      // Move the instanceof result from the return register to the output.
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      // Throws ClassCastException on failure; no result to move.
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};
385
// Slow path for HDeoptimize: transfers execution to the interpreter via the
// kQuickDeoptimize runtime entrypoint; does not return to compiled code.
class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
};
403
// Constructs the MIPS64 code generator: forwards register-file dimensions and
// callee-save masks to the base CodeGenerator and arena-allocates the various
// literal and PC-relative patch tables used for dex-cache, string and type
// references.
CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const Mips64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
440
441#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100442// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
443#define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700444#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700445
// Finalizes code generation: resolves assembler branches, then remaps the
// native PC offsets recorded in stack maps and disassembly intervals to
// their post-branch-fixup positions before handing off to the base class.
// FinalizeCode() must run first — GetAdjustedPosition() depends on it.
void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position = stack_map_stream_.GetStackMap(i).native_pc_offset;
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    // Branch fixup can only grow the code, never shrink it.
    DCHECK_GE(new_position, old_position);
    stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  CodeGenerator::Finalize(allocator);
}
475
// The move resolver emits through the owning code generator's assembler.
Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}
479
// Emits the move at `index` of the pending parallel move.
void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
}
484
// Emits a swap for the move at `index`, used to break move cycles.
void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
}
489
// Restores a spilled scratch register by popping a doubleword off the stack.
void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
  // Pop reg
  __ Ld(GpuRegister(reg), SP, 0);
  __ DecreaseFrameSize(kMips64DoublewordSize);
}
495
// Spills a scratch register by pushing a doubleword onto the stack.
void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
  // Push reg
  __ IncreaseFrameSize(kMips64DoublewordSize);
  __ Sd(GpuRegister(reg), SP, 0);
}
501
// Swaps two SP-relative stack slots (`index1`, `index2`), each a single
// word or a doubleword depending on `double_slot`, using TMP plus one
// scratch register.
void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
  LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
  StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
  // Load both slots, then store them back crossed over.
  __ LoadFromOffset(load_type,
                    GpuRegister(ensure_scratch.GetRegister()),
                    SP,
                    index1 + stack_offset);
  __ LoadFromOffset(load_type,
                    TMP,
                    SP,
                    index2 + stack_offset);
  __ StoreToOffset(store_type,
                   GpuRegister(ensure_scratch.GetRegister()),
                   SP,
                   index2 + stack_offset);
  __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
}
525
// Maps a MIPS64 core register to its DWARF CFI register number.
static dwarf::Reg DWARFReg(GpuRegister reg) {
  return dwarf::Reg::Mips64Core(static_cast<int>(reg));
}
529
// Maps a MIPS64 floating-point register to its DWARF CFI register number.
static dwarf::Reg DWARFReg(FpuRegister reg) {
  return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700533
534void CodeGeneratorMIPS64::GenerateFrameEntry() {
535 __ Bind(&frame_entry_label_);
536
537 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips64) || !IsLeafMethod();
538
539 if (do_overflow_check) {
540 __ LoadFromOffset(kLoadWord,
541 ZERO,
542 SP,
543 -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips64)));
544 RecordPcInfo(nullptr, 0);
545 }
546
Alexey Frunze4dda3372015-06-01 18:31:49 -0700547 if (HasEmptyFrame()) {
548 return;
549 }
550
551 // Make sure the frame size isn't unreasonably large. Per the various APIs
552 // it looks like it should always be less than 2GB in size, which allows
553 // us using 32-bit signed offsets from the stack pointer.
554 if (GetFrameSize() > 0x7FFFFFFF)
555 LOG(FATAL) << "Stack frame larger than 2GB";
556
557 // Spill callee-saved registers.
558 // Note that their cumulative size is small and they can be indexed using
559 // 16-bit offsets.
560
561 // TODO: increment/decrement SP in one step instead of two or remove this comment.
562
563 uint32_t ofs = FrameEntrySpillSize();
564 __ IncreaseFrameSize(ofs);
565
566 for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
567 GpuRegister reg = kCoreCalleeSaves[i];
568 if (allocated_registers_.ContainsCoreRegister(reg)) {
Lazar Trsicd9672662015-09-03 17:33:01 +0200569 ofs -= kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700570 __ Sd(reg, SP, ofs);
571 __ cfi().RelOffset(DWARFReg(reg), ofs);
572 }
573 }
574
575 for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
576 FpuRegister reg = kFpuCalleeSaves[i];
577 if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
Lazar Trsicd9672662015-09-03 17:33:01 +0200578 ofs -= kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700579 __ Sdc1(reg, SP, ofs);
David Srbeckyba702002016-02-01 18:15:29 +0000580 __ cfi().RelOffset(DWARFReg(reg), ofs);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700581 }
582 }
583
584 // Allocate the rest of the frame and store the current method pointer
585 // at its end.
586
587 __ IncreaseFrameSize(GetFrameSize() - FrameEntrySpillSize());
588
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +0100589 // Save the current method if we need it. Note that we do not
590 // do this in HCurrentMethod, as the instruction might have been removed
591 // in the SSA graph.
592 if (RequiresCurrentMethod()) {
593 static_assert(IsInt<16>(kCurrentMethodStackOffset),
594 "kCurrentMethodStackOffset must fit into int16_t");
595 __ Sd(kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
596 }
Goran Jakovljevicc6418422016-12-05 16:31:55 +0100597
598 if (GetGraph()->HasShouldDeoptimizeFlag()) {
599 // Initialize should_deoptimize flag to 0.
600 __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
601 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700602}
603
// Emits the method epilogue: restores callee-saved FPU/core registers in
// the reverse order of the prologue's spills, deallocates the frame, and
// returns via `jr RA`. CFI state is saved/restored around the epilogue so
// unwind info stays correct for code emitted after it.
void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Deallocate the rest of the frame.

    __ DecreaseFrameSize(GetFrameSize() - FrameEntrySpillSize());

    // Restore callee-saved registers.
    // Note that their cumulative size is small and they can be indexed using
    // 16-bit offsets.

    // TODO: increment/decrement SP in one step instead of two or remove this comment.

    uint32_t ofs = 0;

    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        __ Ldc1(reg, SP, ofs);
        ofs += kMips64DoublewordSize;
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ Ld(reg, SP, ofs);
        ofs += kMips64DoublewordSize;
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    // The spill area must be fully consumed.
    DCHECK_EQ(ofs, FrameEntrySpillSize());
    __ DecreaseFrameSize(ofs);
  }

  __ Jr(RA);
  // Nop in the jump's delay slot.
  __ Nop();

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
648
// Binds the assembler label associated with `block` at the current code position.
void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
652
// Emits code to move a value from `source` to `destination`. `dst_type` selects
// between 32-bit and 64-bit moves and between GPR and FPU instruction forms.
// Handles all combinations of register / FPU register / constant / stack slot,
// using AT or TMP as scratch where an intermediate register is needed.
void CodeGeneratorMIPS64::MoveLocation(Location destination,
                                       Location source,
                                       Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);
  // Callers are currently required to pass a concrete type, so the inference
  // branches below are effectively dead; kept for parity with other backends.
  DCHECK_EQ(unspecified_type, false);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 64bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    // The destination register class must agree with the (possibly inferred) type.
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      // Move to GPR/FPR from stack
      LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
      if (Primitive::IsFloatingPointType(dst_type)) {
        __ LoadFpuFromOffset(load_type,
                             destination.AsFpuRegister<FpuRegister>(),
                             SP,
                             source.GetStackIndex());
      } else {
        // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
        __ LoadFromOffset(load_type,
                          destination.AsRegister<GpuRegister>(),
                          SP,
                          source.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to GPR/FPR from constant.
      // For FPU destinations the constant is first materialized in scratch AT,
      // then transferred with Mtc1/Dmtc1; a zero constant reuses register ZERO.
      GpuRegister gpr = AT;
      if (!Primitive::IsFloatingPointType(dst_type)) {
        gpr = destination.AsRegister<GpuRegister>();
      }
      if (dst_type == Primitive::kPrimInt || dst_type == Primitive::kPrimFloat) {
        int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst32(gpr, value);
        }
      } else {
        int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst64(gpr, value);
        }
      }
      if (dst_type == Primitive::kPrimFloat) {
        __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      } else if (dst_type == Primitive::kPrimDouble) {
        __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      }
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        // Move to GPR from GPR
        __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
      } else {
        DCHECK(destination.IsFpuRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        } else {
          __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        }
      }
    } else if (source.IsFpuRegister()) {
      if (destination.IsFpuRegister()) {
        // Move to FPR from FPR
        if (dst_type == Primitive::kPrimFloat) {
          __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          DCHECK_EQ(dst_type, Primitive::kPrimDouble);
          __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      } else {
        DCHECK(destination.IsRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      // Move to stack from GPR/FPR
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      if (source.IsRegister()) {
        __ StoreToOffset(store_type,
                         source.AsRegister<GpuRegister>(),
                         SP,
                         destination.GetStackIndex());
      } else {
        __ StoreFpuToOffset(store_type,
                            source.AsFpuRegister<FpuRegister>(),
                            SP,
                            destination.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to stack from constant
      HConstant* src_cst = source.GetConstant();
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      // A zero constant is stored directly from register ZERO; otherwise it is
      // materialized in scratch TMP first.
      GpuRegister gpr = ZERO;
      if (destination.IsStackSlot()) {
        int32_t value = GetInt32ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst32(gpr, value);
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        int64_t value = GetInt64ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst64(gpr, value);
        }
      }
      __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
      // Move to stack from stack
      if (destination.IsStackSlot()) {
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
      } else {
        __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
      }
    }
  }
}
815
// Emits code that exchanges the values held in `loc1` and `loc2` (used by the
// parallel move resolver). Supports GPR<->GPR, FPR<->FPR, register<->stack and
// stack<->stack swaps; constants are not swappable. TMP/FTMP are the scratch
// registers for the three-move swap sequences.
void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, Primitive::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs; `type` selects single- vs double-precision moves.
    FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
    FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, r1);
      __ MovS(r1, r2);
      __ MovS(r2, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, r1);
      __ MovD(r1, r2);
      __ MovD(r2, FTMP);
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot: load the slot into TMP, store the register
    // into the slot, then move TMP into the register.
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    // Stack<->stack swaps are delegated to the move resolver's exchange helper.
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}
881
// Loads the 32-bit constant `value` into the GPR denoted by `location`.
void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ LoadConst32(location.AsRegister<GpuRegister>(), value);
}
886
Calin Juravlee460d1d2015-09-29 04:52:17 +0100887void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
888 if (location.IsRegister()) {
889 locations->AddTemp(location);
890 } else {
891 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
892 }
893}
894
// Emits the GC card-table write barrier: marks the card covering `object`
// after a reference store of `value` into it. When `value_can_be_null`, a
// null store skips the marking entirely.
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    __ Beqzc(value, &done);
  }
  // Load the card table biased base from the thread, then dirty the card at
  // (base + (object >> kCardShift)). The card value written is the base's
  // low byte, matching the card table's dirty-card encoding.
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
915
// Converts the recorded PC-relative patch placeholders in `infos` into
// LinkerPatch entries, using `Factory` (one of the LinkerPatch::*Patch
// factories) to build each patch at the bound auipc label's code offset.
template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.pc_rel_label.IsBound());
    uint32_t pc_rel_offset = __ GetLabelLocation(&info.pc_rel_label);
    // The literal offset and the PC-relative anchor coincide on MIPS64 (the
    // auipc instruction itself), hence pc_rel_offset is passed twice.
    linker_patches->push_back(Factory(pc_rel_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}
928
// Collects all linker patches recorded during code generation: PC-relative
// dex-cache/type/string patches plus literal-based boot image patches.
// Boot-image compiles use Relative*Patch; app compiles use *BssEntryPatch.
void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_dex_cache_patches_.size() +
      pc_relative_string_patches_.size() +
      pc_relative_type_patches_.size() +
      boot_image_string_patches_.size() +
      boot_image_type_patches_.size() +
      boot_image_address_patches_.size();
  linker_patches->reserve(size);
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                               linker_patches);
  if (!GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(pc_relative_type_patches_,
                                                                linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
                                                                linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  }
  // Literal-based patches: each deduplicated literal's bound label gives the
  // code offset to patch.
  for (const auto& entry : boot_image_string_patches_) {
    const StringReference& target_string = entry.first;
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::StringPatch(literal_offset,
                                                       target_string.dex_file,
                                                       target_string.string_index.index_));
  }
  for (const auto& entry : boot_image_type_patches_) {
    const TypeReference& target_type = entry.first;
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::TypePatch(literal_offset,
                                                     target_type.dex_file,
                                                     target_type.type_index.index_));
  }
  for (const auto& entry : boot_image_address_patches_) {
    DCHECK(GetCompilerOptions().GetIncludePatchInformation());
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
}
978
// Records a new PC-relative patch for the string at `string_index` in `dex_file`.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStringPatch(
    const DexFile& dex_file, dex::StringIndex string_index) {
  return NewPcRelativePatch(dex_file, string_index.index_, &pc_relative_string_patches_);
}
983
// Records a new PC-relative patch for the class at `type_index` in `dex_file`.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeTypePatch(
    const DexFile& dex_file, dex::TypeIndex type_index) {
  return NewPcRelativePatch(dex_file, type_index.index_, &pc_relative_type_patches_);
}
988
// Records a new PC-relative patch for a dex cache array element at `element_offset`.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeDexCacheArrayPatch(
    const DexFile& dex_file, uint32_t element_offset) {
  return NewPcRelativePatch(dex_file, element_offset, &pc_relative_dex_cache_patches_);
}
993
// Appends a PcRelativePatchInfo to `patches` and returns a pointer to it.
// The pointer stays valid because ArenaDeque never relocates its elements.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
    const DexFile& dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
  patches->emplace_back(dex_file, offset_or_index);
  return &patches->back();
}
999
// Returns the literal for `value` from `map`, creating it on first use so
// identical 32-bit constants share one literal pool entry.
Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint32_t>(value); });
}
1005
// Returns the shared literal for the 64-bit constant `value`, creating it on
// first use.
Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
  return uint64_literals_.GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint64_t>(value); });
}
1011
// Returns the literal associated with `target_method` in `map`, creating a
// zero placeholder on first use; the actual value is filled in by patching.
Literal* CodeGeneratorMIPS64::DeduplicateMethodLiteral(MethodReference target_method,
                                                       MethodToLiteralMap* map) {
  return map->GetOrCreate(
      target_method,
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
1018
// Returns the boot-image string literal for (`dex_file`, `string_index`),
// creating a zero placeholder on first use; patched later via EmitLinkerPatches.
Literal* CodeGeneratorMIPS64::DeduplicateBootImageStringLiteral(const DexFile& dex_file,
                                                                dex::StringIndex string_index) {
  return boot_image_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
1025
// Returns the boot-image type literal for (`dex_file`, `type_index`),
// creating a zero placeholder on first use; patched later via EmitLinkerPatches.
Literal* CodeGeneratorMIPS64::DeduplicateBootImageTypeLiteral(const DexFile& dex_file,
                                                              dex::TypeIndex type_index) {
  return boot_image_type_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
1032
1033Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
1034 bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
1035 Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
1036 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
1037}
1038
// Emits the high half of a patchable PC-relative address: binds the patch
// label at the auipc and loads a placeholder high offset into `out`. The
// caller must immediately follow with the low-half consumer instruction.
void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info,
                                                               GpuRegister out) {
  __ Bind(&info->pc_rel_label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // The immediately following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
}
1047
David Brazdil58282f42016-01-14 12:45:10 +00001048void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001049 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
1050 blocked_core_registers_[ZERO] = true;
1051 blocked_core_registers_[K0] = true;
1052 blocked_core_registers_[K1] = true;
1053 blocked_core_registers_[GP] = true;
1054 blocked_core_registers_[SP] = true;
1055 blocked_core_registers_[RA] = true;
1056
Lazar Trsicd9672662015-09-03 17:33:01 +02001057 // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
1058 // registers (similar to how AT is used by MIPS assemblers).
Alexey Frunze4dda3372015-06-01 18:31:49 -07001059 blocked_core_registers_[AT] = true;
1060 blocked_core_registers_[TMP] = true;
Lazar Trsicd9672662015-09-03 17:33:01 +02001061 blocked_core_registers_[TMP2] = true;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001062 blocked_fpu_registers_[FTMP] = true;
1063
1064 // Reserve suspend and thread registers.
1065 blocked_core_registers_[S0] = true;
1066 blocked_core_registers_[TR] = true;
1067
1068 // Reserve T9 for function calls
1069 blocked_core_registers_[T9] = true;
1070
Goran Jakovljevic782be112016-06-21 12:39:04 +02001071 if (GetGraph()->IsDebuggable()) {
1072 // Stubs do not save callee-save floating point registers. If the graph
1073 // is debuggable, we need to deal with these registers differently. For
1074 // now, just block them.
1075 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1076 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1077 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001078 }
1079}
1080
// Spills core register `reg_id` to the stack slot at `stack_index`; returns
// the number of bytes used (one doubleword).
size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}
1085
// Reloads core register `reg_id` from the stack slot at `stack_index`; returns
// the number of bytes consumed (one doubleword).
size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}
1090
// Spills FPU register `reg_id` to the stack slot at `stack_index`; returns
// the number of bytes used (one doubleword).
size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreFpuToOffset(kStoreDoubleword, FpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}
1095
// Reloads FPU register `reg_id` from the stack slot at `stack_index`; returns
// the number of bytes consumed (one doubleword).
size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFpuFromOffset(kLoadDoubleword, FpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}
1100
// Prints the symbolic name of core register `reg` to `stream` (debug dumps).
void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << GpuRegister(reg);
}
1104
// Prints the symbolic name of FPU register `reg` to `stream` (debug dumps).
void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FpuRegister(reg);
}
1108
// Emits a call into the quick runtime `entrypoint`: loads the entrypoint's
// address from the thread register into T9 (the MIPS call register), jumps
// through it, and records a stack map at `dex_pc` when the entrypoint needs one.
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  __ LoadFromOffset(kLoadDoubleword,
                    T9,
                    TR,
                    GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  __ Jalr(T9);
  __ Nop();  // Branch delay slot.
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1124
// Emits a class-initialization check on `class_reg`: branches to `slow_path`
// when the class status is below kStatusInitialized, and binds the slow path's
// exit label for the fast-path fall-through.
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Bltc(TMP, AT, slow_path->GetEntryLabel());
  // TODO: barrier needed?
  __ Bind(slow_path->GetExitLabel());
}
1133
// Emits a full memory barrier; MIPS SYNC stype 0 orders all memory accesses,
// so the requested barrier `kind` is ignored.
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}
1137
// Emits a thread suspension check: tests the thread's flags halfword and
// diverts to a slow path when a suspend is requested. With a `successor`
// block the fast path branches straight to it; otherwise execution falls
// through after the slow path's return label.
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS64(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1157
// Constructs the instruction visitor, caching the codegen's assembler for the
// `__` emission macro.
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1163
1164void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
1165 DCHECK_EQ(instruction->InputCount(), 2U);
1166 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1167 Primitive::Type type = instruction->GetResultType();
1168 switch (type) {
1169 case Primitive::kPrimInt:
1170 case Primitive::kPrimLong: {
1171 locations->SetInAt(0, Location::RequiresRegister());
1172 HInstruction* right = instruction->InputAt(1);
1173 bool can_use_imm = false;
1174 if (right->IsConstant()) {
1175 int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
1176 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
1177 can_use_imm = IsUint<16>(imm);
1178 } else if (instruction->IsAdd()) {
1179 can_use_imm = IsInt<16>(imm);
1180 } else {
1181 DCHECK(instruction->IsSub());
1182 can_use_imm = IsInt<16>(-imm);
1183 }
1184 }
1185 if (can_use_imm)
1186 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1187 else
1188 locations->SetInAt(1, Location::RequiresRegister());
1189 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1190 }
1191 break;
1192
1193 case Primitive::kPrimFloat:
1194 case Primitive::kPrimDouble:
1195 locations->SetInAt(0, Location::RequiresFpuRegister());
1196 locations->SetInAt(1, Location::RequiresFpuRegister());
1197 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1198 break;
1199
1200 default:
1201 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
1202 }
1203}
1204
// Emits code for add/sub/and/or/xor, selecting register or immediate forms and
// 32-bit (Addu/Subu) vs 64-bit (Daddu/Dsubu) variants per the result type.
// Sub with an immediate is lowered as an add of the negated constant.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      // The locations builder only allows a constant here when it fits the
      // instruction's 16-bit immediate field, so no range checks are needed.
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, rhs_imm);
          else
            __ Addu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, rhs_imm);
          else
            __ Daddu(dst, lhs, rhs_reg);
        }
      } else {
        DCHECK(instruction->IsSub());
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, -rhs_imm);
          else
            __ Subu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, -rhs_imm);
          else
            __ Dsubu(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == Primitive::kPrimFloat)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == Primitive::kPrimFloat)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
1292
1293void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08001294 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001295
1296 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1297 Primitive::Type type = instr->GetResultType();
1298 switch (type) {
1299 case Primitive::kPrimInt:
1300 case Primitive::kPrimLong: {
1301 locations->SetInAt(0, Location::RequiresRegister());
1302 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001303 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001304 break;
1305 }
1306 default:
1307 LOG(FATAL) << "Unexpected shift type " << type;
1308 }
1309}
1310
// Emits MIPS64 code for shift/rotate operations (Shl, Shr, UShr, Ror) on
// int and long operands. Selects between immediate-operand and
// register-operand instruction forms based on the allocated location of the
// shift distance.
void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        // The shift distance wraps at the operand width: only the low 5 bits
        // (int) or 6 bits (long) of the constant are significant.
        uint32_t shift_value = rhs_imm &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          // Shift by zero is just a copy (elided when dst == lhs).
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == Primitive::kPrimInt) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          // 64-bit immediate shifts encode only 5 bits of distance; distances
          // of 32-63 use the separate "*32" instruction forms, which take the
          // distance minus 32.
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        // Variable shift distance: the *v instruction forms take the distance
        // in a register (the hardware masks it to the operand width).
        if (type == Primitive::kPrimInt) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
1403
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  // Add shares its register-allocation logic with the other binary operations.
  HandleBinaryOp(instruction);
}
1407
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  // Add shares its code-generation logic with the other binary operations.
  HandleBinaryOp(instruction);
}
1411
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  // And shares its register-allocation logic with the other binary operations.
  HandleBinaryOp(instruction);
}
1415
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  // And shares its code-generation logic with the other binary operations.
  HandleBinaryOp(instruction);
}
1419
1420void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
1421 LocationSummary* locations =
1422 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1423 locations->SetInAt(0, Location::RequiresRegister());
1424 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1425 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1426 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1427 } else {
1428 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1429 }
1430}
1431
// Emits an array element load. For a constant index the element address is
// folded into the load's offset; otherwise the index is scaled by the element
// size (via TMP) and added to the array base. The load of the first element
// may fault on a null array, which is recorded as a possible implicit null
// check at the end.
void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);

  Primitive::Type type = instruction->GetType();
  switch (type) {
    // Zero-extended 8-bit load.
    case Primitive::kPrimBoolean: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
      }
      break;
    }

    // Sign-extended 8-bit load.
    case Primitive::kPrimByte: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset);
      }
      break;
    }

    // Sign-extended 16-bit load.
    case Primitive::kPrimShort: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
        __ Daddu(TMP, obj, TMP);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset);
      }
      break;
    }

    // Zero-extended 16-bit load.
    case Primitive::kPrimChar: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
        __ Daddu(TMP, obj, TMP);
        __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // Heap references are 32-bit; they are loaded zero-extended
      // (kLoadUnsignedWord), while ints are loaded sign-extended (kLoadWord).
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      LoadOperandType load_type = (type == Primitive::kPrimNot) ? kLoadUnsignedWord : kLoadWord;
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(load_type, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
        __ Daddu(TMP, obj, TMP);
        __ LoadFromOffset(load_type, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
        __ Daddu(TMP, obj, TMP);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFpuFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
        __ Daddu(TMP, obj, TMP);
        __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
        __ Daddu(TMP, obj, TMP);
        __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
  codegen_->MaybeRecordImplicitNullCheck(instruction);
}
1559
void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
  // The array reference goes in a register; the result may share a register
  // with the input (no output overlap required).
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
1565
1566void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
1567 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01001568 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001569 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
1570 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1571 __ LoadFromOffset(kLoadWord, out, obj, offset);
1572 codegen_->MaybeRecordImplicitNullCheck(instruction);
1573}
1574
1575void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
David Brazdilbb3d5052015-09-21 18:39:16 +01001576 bool needs_runtime_call = instruction->NeedsTypeCheck();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001577 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1578 instruction,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01001579 needs_runtime_call ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);
David Brazdilbb3d5052015-09-21 18:39:16 +01001580 if (needs_runtime_call) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001581 InvokeRuntimeCallingConvention calling_convention;
1582 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1583 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1584 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1585 } else {
1586 locations->SetInAt(0, Location::RequiresRegister());
1587 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1588 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
1589 locations->SetInAt(2, Location::RequiresFpuRegister());
1590 } else {
1591 locations->SetInAt(2, Location::RequiresRegister());
1592 }
1593 }
1594}
1595
// Emits an array element store. Constant indexes are folded into the store
// offset; variable indexes are scaled via TMP. Reference stores either get a
// GC-card write barrier (when the value may be a non-null reference) or are
// delegated entirely to the aput-object runtime entrypoint when a type check
// is required.
void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    // 8-bit store.
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ StoreToOffset(kStoreByte, value, TMP, data_offset);
      }
      break;
    }

    // 16-bit store.
    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
        __ Daddu(TMP, obj, TMP);
        __ StoreToOffset(kStoreHalfword, value, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        // Direct 32-bit store (ints, and references that need no type check).
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
          __ Daddu(TMP, obj, TMP);
          __ StoreToOffset(kStoreWord, value, TMP, data_offset);
        }
        codegen_->MaybeRecordImplicitNullCheck(instruction);
        if (needs_write_barrier) {
          // Mark the GC card for the stored reference.
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());
        }
      } else {
        // Reference store that needs a type check: call the aput-object
        // runtime entrypoint instead of emitting the check inline.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(kQuickAputObject, instruction, instruction->GetDexPc());
        CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
      }
      break;
    }

    // 64-bit store.
    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreToOffset(kStoreDoubleword, value, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
        __ Daddu(TMP, obj, TMP);
        __ StoreToOffset(kStoreDoubleword, value, TMP, data_offset);
      }
      break;
    }

    // 32-bit FP store.
    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      FpuRegister value = locations->InAt(2).AsFpuRegister<FpuRegister>();
      DCHECK(locations->InAt(2).IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreFpuToOffset(kStoreWord, value, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
        __ Daddu(TMP, obj, TMP);
        __ StoreFpuToOffset(kStoreWord, value, TMP, data_offset);
      }
      break;
    }

    // 64-bit FP store.
    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      FpuRegister value = locations->InAt(2).AsFpuRegister<FpuRegister>();
      DCHECK(locations->InAt(2).IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreFpuToOffset(kStoreDoubleword, value, obj, offset);
      } else {
        __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
        __ Daddu(TMP, obj, TMP);
        __ StoreFpuToOffset(kStoreDoubleword, value, TMP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }

  // Ints and objects are handled in the switch.
  if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
    codegen_->MaybeRecordImplicitNullCheck(instruction);
  }
}
1722
1723void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001724 RegisterSet caller_saves = RegisterSet::Empty();
1725 InvokeRuntimeCallingConvention calling_convention;
1726 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1727 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1728 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001729 locations->SetInAt(0, Location::RequiresRegister());
1730 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001731}
1732
1733void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
1734 LocationSummary* locations = instruction->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01001735 BoundsCheckSlowPathMIPS64* slow_path =
1736 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001737 codegen_->AddSlowPath(slow_path);
1738
1739 GpuRegister index = locations->InAt(0).AsRegister<GpuRegister>();
1740 GpuRegister length = locations->InAt(1).AsRegister<GpuRegister>();
1741
1742 // length is limited by the maximum positive signed 32-bit integer.
1743 // Unsigned comparison of length and index checks for index < 0
1744 // and for length <= index simultaneously.
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001745 __ Bgeuc(index, length, slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001746}
1747
1748void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
1749 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1750 instruction,
1751 LocationSummary::kCallOnSlowPath);
1752 locations->SetInAt(0, Location::RequiresRegister());
1753 locations->SetInAt(1, Location::RequiresRegister());
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01001754 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07001755 locations->AddTemp(Location::RequiresRegister());
1756}
1757
1758void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
1759 LocationSummary* locations = instruction->GetLocations();
1760 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
1761 GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
1762 GpuRegister obj_cls = locations->GetTemp(0).AsRegister<GpuRegister>();
1763
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01001764 SlowPathCodeMIPS64* slow_path =
1765 new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001766 codegen_->AddSlowPath(slow_path);
1767
1768 // TODO: avoid this check if we know obj is not null.
1769 __ Beqzc(obj, slow_path->GetExitLabel());
1770 // Compare the class of `obj` with `cls`.
1771 __ LoadFromOffset(kLoadUnsignedWord, obj_cls, obj, mirror::Object::ClassOffset().Int32Value());
1772 __ Bnec(obj_cls, cls, slow_path->GetEntryLabel());
1773 __ Bind(slow_path->GetExitLabel());
1774}
1775
1776void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
1777 LocationSummary* locations =
1778 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1779 locations->SetInAt(0, Location::RequiresRegister());
1780 if (check->HasUses()) {
1781 locations->SetOut(Location::SameAsFirstInput());
1782 }
1783}
1784
1785void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
1786 // We assume the class is not null.
1787 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
1788 check->GetLoadClass(),
1789 check,
1790 check->GetDexPc(),
1791 true);
1792 codegen_->AddSlowPath(slow_path);
1793 GenerateClassInitializationCheck(slow_path,
1794 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
1795}
1796
// Register allocation for HCompare (the three-way -1/0/1 comparison).
// Integral comparisons allow a constant right-hand side; floating-point
// comparisons need both operands in FPU registers. The result is always a GPR.
void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);

  switch (in_type) {
    // All integral types share the register/constant path.
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}
1825
// Emits code for HCompare, producing -1/0/1 in a GPR. Integral comparisons
// use a pair of Slt instructions; floating-point comparisons use R6 FP
// compare instructions and fall through to the bias value (1 for gt-bias,
// -1 for lt-bias) when either operand is NaN, since no FP condition holds
// for NaN inputs.
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  Primitive::Type in_type = instruction->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      GpuRegister rhs = ZERO;
      if (use_imm) {
        // Materialize a non-zero constant RHS into AT; a zero RHS uses the
        // ZERO register directly.
        if (in_type == Primitive::kPrimLong) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // res = (rhs < lhs) - (lhs < rhs), i.e. -1, 0 or 1.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case Primitive::kPrimFloat: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // NaN falls through both branches and yields 1.
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // NaN falls through both branches and yields -1.
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case Primitive::kPrimDouble: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // NaN falls through both branches and yields 1.
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // NaN falls through both branches and yields -1.
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
1916
// Register allocation for HCondition instructions. Note the deliberate
// `default:` fall-through into the kPrimLong case: every non-FP input type
// takes the GPR path with a register-or-constant right-hand side. The output
// is only allocated when the condition is materialized (not folded into its
// user).
void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  switch (instruction->InputAt(0)->GetType()) {
    default:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      break;
  }
  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
1936
// Emits code for a materialized HCondition; conditions folded into their use
// site are skipped here and generated by the user instead. Dispatches to the
// int/long or floating-point comparison helpers based on the input type.
void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  Primitive::Type type = instruction->InputAt(0)->GetType();
  LocationSummary* locations = instruction->GetLocations();
  switch (type) {
    default:
      // Integer case.
      GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
      return;
    case Primitive::kPrimLong:
      GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
      return;
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
      return;
  }
}
1958
Alexey Frunzec857c742015-09-23 15:12:39 -07001959void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
1960 DCHECK(instruction->IsDiv() || instruction->IsRem());
1961 Primitive::Type type = instruction->GetResultType();
1962
1963 LocationSummary* locations = instruction->GetLocations();
1964 Location second = locations->InAt(1);
1965 DCHECK(second.IsConstant());
1966
1967 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1968 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
1969 int64_t imm = Int64FromConstant(second.GetConstant());
1970 DCHECK(imm == 1 || imm == -1);
1971
1972 if (instruction->IsRem()) {
1973 __ Move(out, ZERO);
1974 } else {
1975 if (imm == -1) {
1976 if (type == Primitive::kPrimInt) {
1977 __ Subu(out, ZERO, dividend);
1978 } else {
1979 DCHECK_EQ(type, Primitive::kPrimLong);
1980 __ Dsubu(out, ZERO, dividend);
1981 }
1982 } else if (out != dividend) {
1983 __ Move(out, dividend);
1984 }
1985 }
1986}
1987
// Specialized code for division/remainder by a constant whose absolute value
// is a power of two (|imm| == 2^ctz_imm). Implemented with shifts and masks:
// a correction derived from the dividend's sign bit is added first so that
// the arithmetic shift rounds toward zero (Java semantics) rather than toward
// negative infinity. 64-bit shift distances of 32-63 use the "*32"
// instruction forms, which encode the distance minus 32.
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (type == Primitive::kPrimInt) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);
      } else {
        // TMP = (dividend < 0) ? abs_imm - 1 : 0, the round-toward-zero bias.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      if (imm < 0) {
        // Negative divisor: negate the quotient.
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimLong);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        // TMP = (dividend < 0) ? abs_imm - 1 : 0, the round-toward-zero bias.
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        // Negative divisor: negate the quotient.
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    if (type == Primitive::kPrimInt) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        // out = ((dividend + bias) & (abs_imm - 1)) - bias.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          // Mask doesn't fit Andi's 16-bit immediate; clear the high bits with
          // a shift pair instead.
          __ Sll(out, out, 32 - ctz_imm);
          __ Srl(out, out, 32 - ctz_imm);
        }
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, Primitive::kPrimLong);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        // out = ((dividend + bias) & (abs_imm - 1)) - bias.
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        if (IsUint<16>(abs_imm - 1)) {
          __ Andi(out, out, abs_imm - 1);
        } else {
          // Mask doesn't fit Andi's 16-bit immediate; clear the high bits with
          // a shift pair instead.
          if (ctz_imm > 32) {
            __ Dsll(out, out, 64 - ctz_imm);
            __ Dsrl(out, out, 64 - ctz_imm);
          } else {
            __ Dsll32(out, out, 32 - ctz_imm);
            __ Dsrl32(out, out, 32 - ctz_imm);
          }
        }
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
2091
// Strength-reduced code for integral Div/Rem by an arbitrary non-zero
// constant that is neither +/-1 nor a power of two. Uses magic-number
// multiplication (see Hacker's Delight, "Division by Constants"):
// quotient ~= hi_half(dividend * magic) >> shift, with sign corrections
// computed by CalculateMagicAndShiftForDivRem. Clobbers TMP and AT.
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == Primitive::kPrimLong),
                                  &magic,
                                  &shift);

  if (type == Primitive::kPrimInt) {
    __ LoadConst32(TMP, magic);
    __ MuhR6(TMP, dividend, TMP);  // TMP = high 32 bits of dividend * magic.

    // Correct for the magic constant having the "wrong" sign for the divisor.
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // Add 1 (TMP - (-1)) when the quotient is negative, rounding toward zero.
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // Remainder: out = dividend - quotient * imm.
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);  // AT = quotient (rounded toward zero).
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    __ LoadConst64(TMP, magic);
    __ Dmuh(TMP, dividend, TMP);  // TMP = high 64 bits of dividend * magic.

    // Correct for the magic constant having the "wrong" sign for the divisor.
    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);  // Dsra32 shifts by shift-32+32 = shift.
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // Add 1 (TMP - (-1)) when the quotient is negative, rounding toward zero.
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      // Remainder: out = dividend - quotient * imm.
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);  // AT = quotient (rounded toward zero).
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
2165
// Dispatches integral Div/Rem code generation: constant divisors get
// strength-reduced sequences; register divisors use the MIPS64 R6
// div/mod instructions directly.
void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  Primitive::Type type = instruction->GetResultType();
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;

  LocationSummary* locations = instruction->GetLocations();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());
    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      // Covers +/-2^k, including INT64_MIN (AbsOrMin maps it to itself).
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      // General case: magic-number multiplication.
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
    GpuRegister divisor = second.AsRegister<GpuRegister>();
    if (instruction->IsDiv()) {
      if (type == Primitive::kPrimInt)
        __ DivR6(out, dividend, divisor);
      else
        __ Ddiv(out, dividend, divisor);
    } else {
      if (type == Primitive::kPrimInt)
        __ ModR6(out, dividend, divisor);
      else
        __ Dmod(out, dividend, divisor);
    }
  }
}
2203
Alexey Frunze4dda3372015-06-01 18:31:49 -07002204void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
2205 LocationSummary* locations =
2206 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2207 switch (div->GetResultType()) {
2208 case Primitive::kPrimInt:
2209 case Primitive::kPrimLong:
2210 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07002211 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002212 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2213 break;
2214
2215 case Primitive::kPrimFloat:
2216 case Primitive::kPrimDouble:
2217 locations->SetInAt(0, Location::RequiresFpuRegister());
2218 locations->SetInAt(1, Location::RequiresFpuRegister());
2219 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2220 break;
2221
2222 default:
2223 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2224 }
2225}
2226
2227void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
2228 Primitive::Type type = instruction->GetType();
2229 LocationSummary* locations = instruction->GetLocations();
2230
2231 switch (type) {
2232 case Primitive::kPrimInt:
Alexey Frunzec857c742015-09-23 15:12:39 -07002233 case Primitive::kPrimLong:
2234 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002235 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002236 case Primitive::kPrimFloat:
2237 case Primitive::kPrimDouble: {
2238 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
2239 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
2240 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
2241 if (type == Primitive::kPrimFloat)
2242 __ DivS(dst, lhs, rhs);
2243 else
2244 __ DivD(dst, lhs, rhs);
2245 break;
2246 }
2247 default:
2248 LOG(FATAL) << "Unexpected div type " << type;
2249 }
2250}
2251
void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  // Throwing slow-path locations; the divisor may be a register or a constant.
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}
2256
2257void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2258 SlowPathCodeMIPS64* slow_path =
2259 new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS64(instruction);
2260 codegen_->AddSlowPath(slow_path);
2261 Location value = instruction->GetLocations()->InAt(0);
2262
2263 Primitive::Type type = instruction->GetType();
2264
Nicolas Geoffraye5671612016-03-16 11:03:54 +00002265 if (!Primitive::IsIntegralType(type)) {
2266 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Serguei Katkov8c0676c2015-08-03 13:55:33 +06002267 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002268 }
2269
2270 if (value.IsConstant()) {
2271 int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
2272 if (divisor == 0) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002273 __ Bc(slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002274 } else {
2275 // A division by a non-null constant is valid. We don't need to perform
2276 // any check, so simply fall through.
2277 }
2278 } else {
2279 __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
2280 }
2281}
2282
2283void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
2284 LocationSummary* locations =
2285 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2286 locations->SetOut(Location::ConstantLocation(constant));
2287}
2288
void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant is materialized at each use site.
  // Will be generated at use site.
}
2292
void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);  // HExit needs no registers.
}
2296
void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
  // Intentionally empty: the exit block emits no code.
}
2299
2300void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
2301 LocationSummary* locations =
2302 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2303 locations->SetOut(Location::ConstantLocation(constant));
2304}
2305
void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant is materialized at each use site.
  // Will be generated at use site.
}
2309
// Emits the control transfer for an unconditional jump (HGoto, or the
// normal-flow edge of HTryBoundary). Loop back edges emit a suspend check
// first; the branch itself is elided when `successor` is laid out
// immediately after the current block.
void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Back edge of a loop with a suspend check: the check also performs the
    // branch to the successor, so we are done after emitting it.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    // Method entry: the suspend check immediately precedes this goto.
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ Bc(codegen_->GetLabelOf(successor));
  }
}
2328
void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);  // HGoto needs no registers.
}
2332
void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
  // Delegate to the common unconditional-jump emitter.
  HandleGoto(got, got->GetSuccessor());
}
2336
void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);  // HTryBoundary needs no registers.
}
2340
void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  // Only the normal-flow edge gets a jump; exceptional edges are handled
  // through the exception-dispatch machinery, not emitted here.
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}
2347
// Materializes the result (0 or 1) of an int/long comparison into `dst`.
// `is64bit` selects 32- vs 64-bit sign-extension of a constant rhs and the
// corresponding ALU forms. Immediates that fit the 16-bit instruction
// encodings (Slti/Sltiu/Xori/Addiu/Daddiu) are used directly; otherwise the
// constant is first loaded into TMP.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
                                                            bool is64bit,
                                                            LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  // Unsigned addend avoids signed-overflow UB when rhs_imm == INT64_MAX.
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // Prefer subtract-immediate-and-test when -rhs_imm fits Addiu/Daddiu.
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (rhs_imm == 0) {
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          if (is64bit) {
            __ Daddiu(dst, lhs, -rhs_imm);
          } else {
            __ Addiu(dst, lhs, -rhs_imm);
          }
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        // Otherwise XOR the operands and test the result for zero.
        if (use_imm && IsUint<16>(rhs_imm)) {
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst64(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
2503
// Emits a fused compare-and-branch to `label` for an int/long condition.
// Comparisons against a constant zero use the R6 compact branch forms that
// encode zero implicitly; note that unsigned "< 0" (kCondB) is statically
// false (no branch emitted) and unsigned ">= 0" (kCondAE) is statically
// true (unconditional branch). Non-zero constants are loaded into TMP.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
                                                                     bool is64bit,
                                                                     LocationSummary* locations,
                                                                     Mips64Label* label) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }

  if (use_imm && rhs_imm == 0) {
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqzc(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnezc(lhs, label);
        break;
      case kCondLT:
        __ Bltzc(lhs, label);
        break;
      case kCondGE:
        __ Bgezc(lhs, label);
        break;
      case kCondLE:
        __ Blezc(lhs, label);
        break;
      case kCondGT:
        __ Bgtzc(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ Bc(label);
        break;
    }
  } else {
    if (use_imm) {
      rhs_reg = TMP;
      __ LoadConst64(rhs_reg, rhs_imm);
    }
    switch (cond) {
      case kCondEQ:
        __ Beqc(lhs, rhs_reg, label);
        break;
      case kCondNE:
        __ Bnec(lhs, rhs_reg, label);
        break;
      case kCondLT:
        __ Bltc(lhs, rhs_reg, label);
        break;
      case kCondGE:
        __ Bgec(lhs, rhs_reg, label);
        break;
      case kCondLE:
        // lhs <= rhs is emitted as rhs >= lhs (operands swapped).
        __ Bgec(rhs_reg, lhs, label);
        break;
      case kCondGT:
        // lhs > rhs is emitted as rhs < lhs (operands swapped).
        __ Bltc(rhs_reg, lhs, label);
        break;
      case kCondB:
        __ Bltuc(lhs, rhs_reg, label);
        break;
      case kCondAE:
        __ Bgeuc(lhs, rhs_reg, label);
        break;
      case kCondBE:
        __ Bgeuc(rhs_reg, lhs, label);
        break;
      case kCondA:
        __ Bltuc(rhs_reg, lhs, label);
        break;
    }
  }
}
2590
// Materializes the result (0 or 1) of a float/double comparison into a GPR.
// R6 CMP.cond.fmt writes an all-ones (true) or all-zeros (false) mask into
// FTMP; Mfc1 + Andi extracts bit 0 as the boolean. For kCondNE, Mfc1 yields
// -1 (true) or 0, so Addiu(dst, dst, 1) turns the equality mask into the
// negated result. `gt_bias` selects how NaN operands compare (Java's
// compare-with-greater/less bias): it chooses between the ordered
// (CmpLt/CmpLe) and unordered (CmpUlt/CmpUle) forms, with GT/GE emitted on
// swapped operands.
void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
                                                       bool gt_bias,
                                                       Primitive::Type type,
                                                       LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Addiu(dst, dst, 1);  // -1 (equal) -> 0; 0 (not equal) -> 1.
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Addiu(dst, dst, 1);  // -1 (equal) -> 0; 0 (not equal) -> 1.
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
2705
// Emits a float/double compare followed by a branch to `label` when the
// condition holds. CMP.cond.fmt leaves a true/false mask in FTMP; Bc1nez
// branches when the mask is non-zero. kCondNE instead negates a CmpEq via
// Bc1eqz. `gt_bias` selects ordered vs unordered compare forms so that NaN
// operands resolve the way the HCondition's bias requires (GT/GE are
// emitted on swapped operands).
void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
                                                                bool gt_bias,
                                                                Primitive::Type type,
                                                                LocationSummary* locations,
                                                                Mips64Label* label) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == Primitive::kPrimFloat) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);  // Branch when NOT equal.
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);  // Branch when NOT equal.
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
    }
  }
}
2806
// Emits the branch(es) implementing `instruction`'s boolean condition (its
// input at `condition_input_index`). A null target means that path falls
// through. Constant conditions collapse to an unconditional branch (or
// nothing); a materialized condition is tested against zero; an
// un-materialized HCondition is folded into a fused compare-and-branch.
void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           Mips64Label* true_target,
                                                           Mips64Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ Bc(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ Bc(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
    } else {
      __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    Primitive::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    Mips64Label* branch_target = true_target;

    if (true_target == nullptr) {
      // Pattern (1): branch to false_target on the opposite condition.
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        // All non-long integral (and reference) comparisons are 32-bit.
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
        break;
      case Primitive::kPrimLong:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ Bc(false_target);
  }
}
2882
void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  // Only a materialized condition (boolean already in a register) needs an
  // input; un-materialized conditions are consumed by the branch emitter.
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
2889
2890void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00002891 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2892 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002893 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00002894 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002895 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00002896 nullptr : codegen_->GetLabelOf(false_successor);
2897 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002898}
2899
void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  // Deoptimization runs on a slow path.
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  // As with HIf, only a materialized condition needs a register input.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
2908
void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  // Branch to the deoptimization slow path when the condition holds;
  // otherwise fall through and continue with compiled code.
  SlowPathCodeMIPS64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}
2917
void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  // The flag is loaded from the stack into a fresh register.
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
2923
void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  // Load the 32-bit should-deoptimize flag from its slot in the current frame.
  __ LoadFromOffset(kLoadWord,
                    flag->GetLocations()->Out().AsRegister<GpuRegister>(),
                    SP,
                    codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
}
2930
David Brazdil74eb1b22015-12-14 11:44:01 +00002931void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
2932 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
2933 if (Primitive::IsFloatingPointType(select->GetType())) {
2934 locations->SetInAt(0, Location::RequiresFpuRegister());
2935 locations->SetInAt(1, Location::RequiresFpuRegister());
2936 } else {
2937 locations->SetInAt(0, Location::RequiresRegister());
2938 locations->SetInAt(1, Location::RequiresRegister());
2939 }
2940 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
2941 locations->SetInAt(2, Location::RequiresRegister());
2942 }
2943 locations->SetOut(Location::SameAsFirstInput());
2944}
2945
void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Mips64Label false_target;
  // The output aliases InAt(0). When the condition is false, branch over
  // the move; when it is true, fall through and overwrite out with InAt(1).
  GenerateTestAndBranch(select,
                        /* condition_input_index */ 2,
                        /* true_target */ nullptr,
                        &false_target);
  codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
  __ Bind(&false_target);
}
2956
void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No register constraints; the LocationSummary attaches itself to `info`.
  new (GetGraph()->GetArena()) LocationSummary(info);
}
2960
// Intentionally generates no code of its own.
void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
2964
// Emits a single no-op instruction.
void CodeGeneratorMIPS64::GenerateNop() {
  __ Nop();
}
2968
Alexey Frunze4dda3372015-06-01 18:31:49 -07002969void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
2970 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
2971 LocationSummary* locations =
2972 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2973 locations->SetInAt(0, Location::RequiresRegister());
2974 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2975 locations->SetOut(Location::RequiresFpuRegister());
2976 } else {
2977 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2978 }
2979}
2980
// Generates an instance/static field read: selects the load width and
// sign/zero extension from the field type, then issues a single load from
// (object + field offset).
void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  LoadOperandType load_type = kLoadUnsignedByte;
  switch (type) {
    case Primitive::kPrimBoolean:
      load_type = kLoadUnsignedByte;  // Zero-extend: boolean is unsigned.
      break;
    case Primitive::kPrimByte:
      load_type = kLoadSignedByte;  // Sign-extend: byte is signed.
      break;
    case Primitive::kPrimShort:
      load_type = kLoadSignedHalfword;
      break;
    case Primitive::kPrimChar:
      load_type = kLoadUnsignedHalfword;  // char is unsigned 16-bit.
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      load_type = kLoadWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      load_type = kLoadDoubleword;
      break;
    case Primitive::kPrimNot:
      // References are 32-bit in the heap and zero-extended into a 64-bit register.
      load_type = kLoadUnsignedWord;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!Primitive::IsFloatingPointType(type)) {
    DCHECK(locations->Out().IsRegister());
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    __ LoadFromOffset(load_type, dst, obj, field_info.GetFieldOffset().Uint32Value());
  } else {
    DCHECK(locations->Out().IsFpuRegister());
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, field_info.GetFieldOffset().Uint32Value());
  }

  // Must immediately follow the load so the faulting PC maps to this instruction.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // TODO: memory barrier? (NOTE(review): volatile field gets presumably need
  // acquire semantics here — confirm against other backends.)
}
3028
3029void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
3030 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
3031 LocationSummary* locations =
3032 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3033 locations->SetInAt(0, Location::RequiresRegister());
3034 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
3035 locations->SetInAt(1, Location::RequiresFpuRegister());
3036 } else {
3037 locations->SetInAt(1, Location::RequiresRegister());
3038 }
3039}
3040
// Generates an instance/static field write: selects the store width from the
// field type, issues a single store to (object + field offset), and marks the
// GC card when a reference may have been written into the object.
void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  StoreOperandType store_type = kStoreByte;
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      store_type = kStoreByte;
      break;
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
      store_type = kStoreHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:  // References are stored as 32-bit words.
      store_type = kStoreWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      store_type = kStoreDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!Primitive::IsFloatingPointType(type)) {
    DCHECK(locations->InAt(1).IsRegister());
    GpuRegister src = locations->InAt(1).AsRegister<GpuRegister>();
    __ StoreToOffset(store_type, src, obj, field_info.GetFieldOffset().Uint32Value());
  } else {
    DCHECK(locations->InAt(1).IsFpuRegister());
    FpuRegister src = locations->InAt(1).AsFpuRegister<FpuRegister>();
    __ StoreFpuToOffset(store_type, src, obj, field_info.GetFieldOffset().Uint32Value());
  }

  // Must immediately follow the store so the faulting PC maps to this instruction.
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // TODO: memory barriers? (NOTE(review): volatile field puts presumably need
  // release semantics — confirm against other backends.)
  if (CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1))) {
    // Reference write: dirty the card for the holding object so the GC can
    // find the cross-generation pointer.
    DCHECK(locations->InAt(1).IsRegister());
    GpuRegister src = locations->InAt(1).AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }
}
3088
// Delegates to the shared field-get location logic.
void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3092
// Delegates to the shared field-get code generation.
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
3096
// Delegates to the shared field-set location logic.
void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
3100
// Delegates to the shared field-set code generation, forwarding whether the
// stored value may be null (affects the GC card marking).
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
3104
// Loads a GC root (a 32-bit compressed reference) from (obj + offset) into
// `root`. The emitted instruction must remain exactly one `lwu` because this
// may be the second half of a patchable PC-relative pair (see below).
void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(
    HInstruction* instruction ATTRIBUTE_UNUSED,
    Location root,
    GpuRegister obj,
    uint32_t offset) {
  // When handling PC-relative loads, the caller calls
  // EmitPcRelativeAddressPlaceholderHigh() and then GenerateGcRootFieldLoad().
  // The relative patcher expects the two methods to emit the following patchable
  // sequence of instructions in this case:
  //   auipc reg1, 0x1234  // 0x1234 is a placeholder for offset_high.
  //   lwu   reg2, 0x5678(reg1)  // 0x5678 is a placeholder for offset_low.
  // TODO: Adjust GenerateGcRootFieldLoad() and its caller when this method is
  // extended (e.g. for read barriers) so as not to break the relative patcher.
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (kEmitCompilerReadBarrier) {
    UNIMPLEMENTED(FATAL) << "for read barrier";
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
3129
Alexey Frunze4dda3372015-06-01 18:31:49 -07003130void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
3131 LocationSummary::CallKind call_kind =
Nicolas Geoffray85c7bab2015-09-18 13:40:46 +00003132 instruction->IsExactCheck() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003133 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3134 locations->SetInAt(0, Location::RequiresRegister());
3135 locations->SetInAt(1, Location::RequiresRegister());
3136 // The output does overlap inputs.
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01003137 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07003138 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3139}
3140
// Generates `obj instanceof cls`: 0 for null, inline class-pointer compare
// for exact checks, slow path otherwise.
void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  Mips64Label done;

  // Return 0 if `obj` is null.
  // TODO: Avoid this check if we know `obj` is not null.
  __ Move(out, ZERO);
  __ Beqzc(obj, &done);

  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadUnsignedWord, out, obj, mirror::Object::ClassOffset().Int32Value());
  if (instruction->IsExactCheck()) {
    // Classes must be equal for the instanceof to succeed.
    // out = (out ^ cls) < 1, i.e. 1 iff the class pointers are identical.
    __ Xor(out, out, cls);
    __ Sltiu(out, out, 1);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    SlowPathCodeMIPS64* slow_path =
        new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    __ Bnec(out, cls, slow_path->GetEntryLabel());
    __ LoadConst32(out, 1);  // Fast path: classes equal => true.
    __ Bind(slow_path->GetExitLabel());
  }

  __ Bind(&done);
}
3173
// Int constants are not materialized here; record a constant location so
// users can fold the value in directly.
void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}
3178
void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3182
// Null constants are not materialized here; record a constant location so
// users can fold the value in directly.
void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}
3187
void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3191
void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
3198
// Resolution and dispatch happen at runtime via the shared trampoline.
void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
3202
// Assigns argument/return locations for an invoke according to the MIPS64
// dex calling convention.
void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}
3207
void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // The register T0 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
}
3214
// Generates an interface call: load the receiver's class, index into its
// IMT, and jump to the resolved ArtMethod's quick entry point. The hidden
// argument (dex method index) disambiguates IMT conflicts.
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument (expected in T0, reserved by the locations builder).
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // Must immediately follow the class load: a fault there is the null check.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetImt();
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();  (Nop fills the jalr delay slot.)
  __ Jalr(T9);
  __ Nop();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
3248
3249void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07003250 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
3251 if (intrinsic.TryDispatch(invoke)) {
3252 return;
3253 }
3254
Alexey Frunze4dda3372015-06-01 18:31:49 -07003255 HandleInvoke(invoke);
3256}
3257
3258void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003259 // Explicit clinit checks triggered by static invokes must have been pruned by
3260 // art::PrepareForRegisterAllocation.
3261 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003262
Chris Larsen3039e382015-08-26 07:54:08 -07003263 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
3264 if (intrinsic.TryDispatch(invoke)) {
3265 return;
3266 }
3267
Alexey Frunze4dda3372015-06-01 18:31:49 -07003268 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003269}
3270
// Polymorphic invokes (MethodHandle.invoke/invokeExact) use the generic
// invoke locations; dispatch is performed at runtime.
void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}
3274
// Delegates to the shared runtime-call-based polymorphic dispatch.
void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
3278
Chris Larsen3039e382015-08-26 07:54:08 -07003279static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003280 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07003281 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
3282 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003283 return true;
3284 }
3285 return false;
3286}
3287
// Validates the desired HLoadString kind against this backend's capabilities
// and compiler mode, falling back to kDexCacheViaMethod for unimplemented
// kinds (currently the JIT string table).
HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  if (kEmitCompilerReadBarrier) {
    UNIMPLEMENTED(FATAL) << "for read barrier";
  }
  bool fallback_load = false;
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
      break;
    case HLoadString::LoadKind::kBssEntry:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kDexCacheViaMethod:
      break;
    case HLoadString::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      // TODO: implement.
      fallback_load = true;
      break;
  }
  if (fallback_load) {
    desired_string_load_kind = HLoadString::LoadKind::kDexCacheViaMethod;
  }
  return desired_string_load_kind;
}
3319
// Validates the desired HLoadClass kind against this backend's capabilities
// and compiler mode, falling back to kDexCacheViaMethod for unimplemented
// kinds (currently the JIT class table).
HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  if (kEmitCompilerReadBarrier) {
    UNIMPLEMENTED(FATAL) << "for read barrier";
  }
  bool fallback_load = false;
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadClass::LoadKind::kBootImageAddress:
      break;
    case HLoadClass::LoadKind::kBssEntry:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      // TODO: implement.
      fallback_load = true;
      break;
    case HLoadClass::LoadKind::kDexCacheViaMethod:
      break;
  }
  if (fallback_load) {
    desired_class_load_kind = HLoadClass::LoadKind::kDexCacheViaMethod;
  }
  return desired_class_load_kind;
}
3353
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On MIPS64 we support all dispatch types.
  return desired_dispatch_info;
}
3360
// Generates a static/direct call: first materializes the callee ArtMethod*
// (into `temp`, except for kRecursive) according to the method load kind,
// then emits the call according to the code pointer location. The
// kDexCachePcRelative case emits a patchable auipc/ld pair whose shape the
// relative patcher depends on.
void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Calling ourselves: the current method is already available as an input.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Known method address: load it from the (deduplicated) literal pool.
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadDoubleword,
                     DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      // PC-relative load from the dex cache arrays; both halves of the
      // address are placeholders patched later.
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      CodeGeneratorMIPS64::PcRelativePatchInfo* info =
          NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset);
      EmitPcRelativeAddressPlaceholderHigh(info, AT);
      __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      GpuRegister reg = temp.AsRegister<GpuRegister>();
      GpuRegister method_reg;
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<GpuRegister>();
      } else {
        // TODO: use the appropriate DCHECK() here if possible.
        // DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        // Reload the current method from its home stack slot.
        __ Ld(reg, SP, kCurrentMethodStackOffset);
      }

      // temp = temp->dex_cache_resolved_methods_;
      __ LoadFromOffset(kLoadDoubleword,
                        reg,
                        method_reg,
                        ArtMethod::DexCacheResolvedMethodsOffset(kMips64PointerSize).Int32Value());
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ LoadFromOffset(kLoadDoubleword,
                        reg,
                        reg,
                        CodeGenerator::GetCachePointerOffset(index_in_cache));
      break;
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursive call: compact branch-and-link straight to our own
      // frame entry, skipping the indirect dispatch.
      __ Balc(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9()  (Nop fills the jalr delay slot.)
      __ Jalr(T9);
      __ Nop();
      break;
  }
  DCHECK(!IsLeafMethod());
}
3442
// Emits a static/direct call (or the intrinsic replacing it) and records
// the call's PC for stack maps.
void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  codegen_->GenerateStaticOrDirectCall(invoke,
                                       locations->HasTemps()
                                           ? locations->GetTemp(0)
                                           : Location::NoLocation());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
3459
// Generates a virtual call: load the receiver's class, fetch the target
// ArtMethod* from its embedded vtable, and jump to its quick entry point.
void CodeGeneratorMIPS64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_location) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  GpuRegister receiver = calling_convention.GetRegisterAt(0);

  GpuRegister temp = temp_location.AsRegister<GpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // temp = object->GetClass();
  __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
  // Must immediately follow the class load: a fault there is the null check.
  MaybeRecordImplicitNullCheck(invoke);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9()  (Nop fills the jalr delay slot.)
  __ Jalr(T9);
  __ Nop();
}
3485
// Emits a virtual call (or the intrinsic replacing it) and records the
// call's PC for stack maps.
void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
3495
// Sets up locations for HLoadClass: a runtime-call summary for the
// dex-cache-via-method kind, otherwise a register output (plus the current
// method as input for kReferrersClass).
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    InvokeRuntimeCallingConvention calling_convention;
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
        cls,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        calling_convention.GetReturnLocation(Primitive::kPrimNot));
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // A slow path may be needed for clinit checks / BSS resolution.
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    locations->SetInAt(0, Location::RequiresRegister());  // The current method.
  }
  locations->SetOut(Location::RequiresRegister());
}
3517
// Generates HLoadClass according to its load kind: from the referrer, a
// link-time literal, a patchable PC-relative address, a known boot-image
// address, or a BSS entry — with a slow path for resolution and/or class
// initialization where needed. The PC-relative cases emit patchable
// auipc-based pairs whose shape the relative patcher depends on.
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  GpuRegister current_method_reg = ZERO;
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
  }

  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value());
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      // Address patched at link time; loaded from the literal pool.
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
                                                               cls->GetTypeIndex()));
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      // Patchable auipc + daddiu computing the class address.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK(!kEmitCompilerReadBarrier);
      DCHECK_NE(cls->GetAddress(), 0u);
      uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Patchable auipc + lwu reading the .bss class entry; may yield null
      // if the class is not yet resolved.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
      __ Lwu(out, AT, /* placeholder */ 0x5678);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      LOG(FATAL) << "Unimplemented";
      break;
    }
    case HLoadClass::LoadKind::kDexCacheViaMethod:
      // Handled by the runtime call above.
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Beqzc(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3603
David Brazdilcb1c0552015-08-04 16:22:25 +01003604static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07003605 return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
David Brazdilcb1c0552015-08-04 16:22:25 +01003606}
3607
Alexey Frunze4dda3372015-06-01 18:31:49 -07003608void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
3609 LocationSummary* locations =
3610 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3611 locations->SetOut(Location::RequiresRegister());
3612}
3613
3614void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
3615 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
David Brazdilcb1c0552015-08-04 16:22:25 +01003616 __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
3617}
3618
void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
  // No inputs or outputs; a LocationSummary is still attached to the instruction.
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
3622
void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Clear the pending exception by storing a zero word into the thread-local slot.
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
3626
Alexey Frunze4dda3372015-06-01 18:31:49 -07003627void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08003628 HLoadString::LoadKind load_kind = load->GetLoadKind();
3629 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00003630 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Alexey Frunzef63f5692016-12-13 17:43:11 -08003631 if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
3632 InvokeRuntimeCallingConvention calling_convention;
3633 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
3634 } else {
3635 locations->SetOut(Location::RequiresRegister());
3636 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003637}
3638
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00003639// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
3640// move.
3641void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexey Frunzef63f5692016-12-13 17:43:11 -08003642 HLoadString::LoadKind load_kind = load->GetLoadKind();
3643 LocationSummary* locations = load->GetLocations();
3644 Location out_loc = locations->Out();
3645 GpuRegister out = out_loc.AsRegister<GpuRegister>();
3646
3647 switch (load_kind) {
3648 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00003649 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunzef63f5692016-12-13 17:43:11 -08003650 __ LoadLiteral(out,
3651 kLoadUnsignedWord,
3652 codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
3653 load->GetStringIndex()));
3654 return; // No dex cache slow path.
3655 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
3656 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
3657 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00003658 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunzef63f5692016-12-13 17:43:11 -08003659 codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
3660 __ Daddiu(out, AT, /* placeholder */ 0x5678);
3661 return; // No dex cache slow path.
3662 }
3663 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00003664 uint32_t address = dchecked_integral_cast<uint32_t>(
3665 reinterpret_cast<uintptr_t>(load->GetString().Get()));
3666 DCHECK_NE(address, 0u);
Alexey Frunzef63f5692016-12-13 17:43:11 -08003667 __ LoadLiteral(out,
3668 kLoadUnsignedWord,
3669 codegen_->DeduplicateBootImageAddressLiteral(address));
3670 return; // No dex cache slow path.
3671 }
3672 case HLoadString::LoadKind::kBssEntry: {
3673 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
3674 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00003675 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunzef63f5692016-12-13 17:43:11 -08003676 codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
3677 __ Lwu(out, AT, /* placeholder */ 0x5678);
3678 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathMIPS64(load);
3679 codegen_->AddSlowPath(slow_path);
3680 __ Beqzc(out, slow_path->GetEntryLabel());
3681 __ Bind(slow_path->GetExitLabel());
3682 return;
3683 }
3684 default:
3685 break;
3686 }
3687
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07003688 // TODO: Re-add the compiler code to do string dex cache lookup again.
Alexey Frunzef63f5692016-12-13 17:43:11 -08003689 DCHECK(load_kind == HLoadString::LoadKind::kDexCacheViaMethod);
3690 InvokeRuntimeCallingConvention calling_convention;
3691 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
3692 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
3693 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003694}
3695
Alexey Frunze4dda3372015-06-01 18:31:49 -07003696void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
3697 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3698 locations->SetOut(Location::ConstantLocation(constant));
3699}
3700
void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3704
void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
  // Monitor enter/exit is always a runtime call; the object goes in the first
  // runtime-call argument register.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
3711
3712void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01003713 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07003714 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01003715 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00003716 if (instruction->IsEnter()) {
3717 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
3718 } else {
3719 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
3720 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003721}
3722
3723void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
3724 LocationSummary* locations =
3725 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3726 switch (mul->GetResultType()) {
3727 case Primitive::kPrimInt:
3728 case Primitive::kPrimLong:
3729 locations->SetInAt(0, Location::RequiresRegister());
3730 locations->SetInAt(1, Location::RequiresRegister());
3731 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3732 break;
3733
3734 case Primitive::kPrimFloat:
3735 case Primitive::kPrimDouble:
3736 locations->SetInAt(0, Location::RequiresFpuRegister());
3737 locations->SetInAt(1, Location::RequiresFpuRegister());
3738 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3739 break;
3740
3741 default:
3742 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
3743 }
3744}
3745
3746void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
3747 Primitive::Type type = instruction->GetType();
3748 LocationSummary* locations = instruction->GetLocations();
3749
3750 switch (type) {
3751 case Primitive::kPrimInt:
3752 case Primitive::kPrimLong: {
3753 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3754 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
3755 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
3756 if (type == Primitive::kPrimInt)
3757 __ MulR6(dst, lhs, rhs);
3758 else
3759 __ Dmul(dst, lhs, rhs);
3760 break;
3761 }
3762 case Primitive::kPrimFloat:
3763 case Primitive::kPrimDouble: {
3764 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3765 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3766 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
3767 if (type == Primitive::kPrimFloat)
3768 __ MulS(dst, lhs, rhs);
3769 else
3770 __ MulD(dst, lhs, rhs);
3771 break;
3772 }
3773 default:
3774 LOG(FATAL) << "Unexpected mul type " << type;
3775 }
3776}
3777
3778void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
3779 LocationSummary* locations =
3780 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
3781 switch (neg->GetResultType()) {
3782 case Primitive::kPrimInt:
3783 case Primitive::kPrimLong:
3784 locations->SetInAt(0, Location::RequiresRegister());
3785 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3786 break;
3787
3788 case Primitive::kPrimFloat:
3789 case Primitive::kPrimDouble:
3790 locations->SetInAt(0, Location::RequiresFpuRegister());
3791 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3792 break;
3793
3794 default:
3795 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
3796 }
3797}
3798
3799void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
3800 Primitive::Type type = instruction->GetType();
3801 LocationSummary* locations = instruction->GetLocations();
3802
3803 switch (type) {
3804 case Primitive::kPrimInt:
3805 case Primitive::kPrimLong: {
3806 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3807 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
3808 if (type == Primitive::kPrimInt)
3809 __ Subu(dst, ZERO, src);
3810 else
3811 __ Dsubu(dst, ZERO, src);
3812 break;
3813 }
3814 case Primitive::kPrimFloat:
3815 case Primitive::kPrimDouble: {
3816 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3817 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
3818 if (type == Primitive::kPrimFloat)
3819 __ NegS(dst, src);
3820 else
3821 __ NegD(dst, src);
3822 break;
3823 }
3824 default:
3825 LOG(FATAL) << "Unexpected neg type " << type;
3826 }
3827}
3828
void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
  // Array allocation is a runtime call; the type index goes in a temp (arg 0),
  // the length and method arguments in the next two argument registers.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}
3838
void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  // Move a uint16_t value (the type index) into the first argument register.
  __ LoadConst32(locations->GetTemp(0).AsRegister<GpuRegister>(),
                 instruction->GetTypeIndex().index_);
  codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
}
3847
void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
  // Object allocation is a runtime call. String allocation goes through
  // StringFactory and only needs a temp for the entrypoint dispatch.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}
3859
void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    // The entrypoint's ArtMethod* is loaded from the thread, then its quick
    // code pointer is called indirectly through T9 (standard MIPS call register).
    GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
    MemberOffset code_offset =
        ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
    __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ Nop();
    // Manual call above bypasses InvokeRuntime, so record the PC info here.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
3876
void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
  // Bitwise NOT: one GPR in, one GPR out, no runtime call.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
3882
3883void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
3884 Primitive::Type type = instruction->GetType();
3885 LocationSummary* locations = instruction->GetLocations();
3886
3887 switch (type) {
3888 case Primitive::kPrimInt:
3889 case Primitive::kPrimLong: {
3890 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3891 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
3892 __ Nor(dst, src, ZERO);
3893 break;
3894 }
3895
3896 default:
3897 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
3898 }
3899}
3900
void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
  // Boolean NOT: one GPR in, one GPR out, no runtime call.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
3906
3907void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
3908 LocationSummary* locations = instruction->GetLocations();
3909 __ Xori(locations->Out().AsRegister<GpuRegister>(),
3910 locations->InAt(0).AsRegister<GpuRegister>(),
3911 1);
3912}
3913
void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Null checks throw via a slow path; the checked object must be in a register.
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}
3918
Calin Juravle2ae48182016-03-16 14:05:09 +00003919void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
3920 if (CanMoveNullCheckToUser(instruction)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003921 return;
3922 }
3923 Location obj = instruction->GetLocations()->InAt(0);
3924
3925 __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
Calin Juravle2ae48182016-03-16 14:05:09 +00003926 RecordPcInfo(instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003927}
3928
Calin Juravle2ae48182016-03-16 14:05:09 +00003929void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003930 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00003931 AddSlowPath(slow_path);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003932
3933 Location obj = instruction->GetLocations()->InAt(0);
3934
3935 __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
3936}
3937
void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Dispatches to the implicit or explicit variant above.
  codegen_->GenerateNullCheck(instruction);
}
3941
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  // Bitwise OR shares the common binary-operation location rules.
  HandleBinaryOp(instruction);
}
3945
void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  // Bitwise OR shares the common binary-operation code generation.
  HandleBinaryOp(instruction);
}
3949
void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves never reach the locations builder; hitting this is a compiler bug.
  LOG(FATAL) << "Unreachable";
}
3953
void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the move resolver, which orders the moves safely.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
3957
void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  // Stack-passed parameters live in the caller's frame: rebase the slot past
  // this method's own frame.
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}
3968
void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
3973
void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
  // The current ArtMethod* is pinned to the method register (A0).
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
3979
void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
                                                        ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
3984
void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
  // Phis impose no constraints; the register allocator resolves them.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}
3992
void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis are eliminated before code generation; hitting this is a compiler bug.
  LOG(FATAL) << "Unreachable";
}
3996
void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  // FP remainder is computed by a runtime call (fmod/fmodf); integral
  // remainder is generated inline.
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
                                           : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      // Constant divisors enable strength-reduced code paths.
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // Operands and result must follow the runtime calling convention.
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
4025
void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      // Shares the div/rem integral code generation helper.
      GenerateDivRemIntegral(instruction);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // FP remainder goes through the fmodf/fmod runtime entrypoints.
      QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
      codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
      if (type == Primitive::kPrimFloat) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
4050
void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Barriers have no operands.
  memory_barrier->SetLocations(nullptr);
}
4054
void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the fence matching the requested barrier kind.
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
4058
void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
  // The returned value must be in the ABI return location for its type.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  Primitive::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, Mips64ReturnLocation(return_type));
}
4064
void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  // The value is already in the return register; just tear down the frame.
  codegen_->GenerateFrameExit();
}
4068
void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
  // No operands for a void return.
  ret->SetLocations(nullptr);
}
4072
void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
4076
Alexey Frunze92d90602015-12-18 18:16:36 -08004077void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
4078 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004079}
4080
Alexey Frunze92d90602015-12-18 18:16:36 -08004081void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
4082 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00004083}
4084
Alexey Frunze4dda3372015-06-01 18:31:49 -07004085void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
4086 HandleShift(shl);
4087}
4088
void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}
4092
void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
  // Arithmetic right shifts share the common shift location rules.
  HandleShift(shr);
}
4096
void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
4100
Alexey Frunze4dda3372015-06-01 18:31:49 -07004101void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
4102 HandleBinaryOp(instruction);
4103}
4104
void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}
4108
void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field loads share the common field-get handling.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4112
void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4116
void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Static field stores share the common field-set handling.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4120
void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // The null-ness hint lets the helper skip the GC write barrier when possible.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
4124
Calin Juravlee460d1d2015-09-29 04:52:17 +01004125void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
4126 HUnresolvedInstanceFieldGet* instruction) {
4127 FieldAccessCallingConventionMIPS64 calling_convention;
4128 codegen_->CreateUnresolvedFieldLocationSummary(
4129 instruction, instruction->GetFieldType(), calling_convention);
4130}
4131
void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  // Delegate to the shared runtime-call field access generator.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4141
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Unresolved field access always goes through the runtime.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4148
void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  // Delegate to the shared runtime-call field access generator.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4158
void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Unresolved field access always goes through the runtime.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4165
void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  // Delegate to the shared runtime-call field access generator.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4175
void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Unresolved field access always goes through the runtime.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}
4182
void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  // Delegate to the shared runtime-call field access generator.
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
4192
Alexey Frunze4dda3372015-06-01 18:31:49 -07004193void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01004194 LocationSummary* locations =
4195 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01004196 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Alexey Frunze4dda3372015-06-01 18:31:49 -07004197}
4198
void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  // Suspend checks at loop headers and entry blocks may be folded into a
  // nearby branch; only emit one here when no branch will cover it.
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
4212
Alexey Frunze4dda3372015-06-01 18:31:49 -07004213void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
4214 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01004215 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004216 InvokeRuntimeCallingConvention calling_convention;
4217 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4218}
4219
void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
4224
void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);

  // Reference and void types can never appear in a primitive conversion.
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion);

  // Register class of the input/output follows whether the type is FP.
  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
4249
// Emits MIPS64 code for an HTypeConversion. Four cases, keyed on the
// input/result type categories:
//   - integral -> integral: mask (char), SEB/SEH sign extension (byte/short),
//     or a 32-bit sign extension via SLL (int/long);
//   - integral -> FP: move the GPR value into FTMP, then CVT;
//   - FP -> integral: range/NaN checks followed by a truncate instruction
//     (see the long NAN2008 comment below);
//   - FP -> FP: a single CVT between float and double.
// Any other combination is rejected with LOG(FATAL).
void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case Primitive::kPrimChar:
        // char is unsigned 16-bit: zero-extend by masking.
        __ Andi(dst, src, 0xFFFF);
        break;
      case Primitive::kPrimByte:
        if (input_type == Primitive::kPrimLong) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          __ Seb(dst, src);
        }
        break;
      case Primitive::kPrimShort:
        if (input_type == Primitive::kPrimLong) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          __ Seh(dst, src);
        }
        break;
      case Primitive::kPrimInt:
      case Primitive::kPrimLong:
        // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
        // conversions, except when the input and output registers are the same and we are not
        // converting longs to shorter types. In these cases, do nothing.
        if ((input_type == Primitive::kPrimLong) || (dst != src)) {
          __ Sll(dst, src, 0);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    // int/long -> float/double: move the integer into FTMP, then convert.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    if (input_type == Primitive::kPrimLong) {
      __ Dmtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      __ Mtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    Mips64Label truncate;
    Mips64Label done;

    // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
    // value when the input is either a NaN or is outside of the range of the output type
    // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
    // the same result.
    //
    // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
    // value of the output type if the input is outside of the range after the truncation or
    // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
    // results. This matches the desired float/double-to-int/long conversion exactly.
    //
    // So, NAN2008 affects handling of negative values and NaNs by the truncate instruction.
    //
    // The following code supports both NAN2008=0 and NAN2008=1 behaviors of the truncate
    // instruction, the reason being that the emulator implements NAN2008=0 on MIPS64R6,
    // even though it must be NAN2008=1 on R6.
    //
    // The code takes care of the different behaviors by first comparing the input to the
    // minimum output value (-2**-63 for truncating to long, -2**-31 for truncating to int).
    // If the input is greater than or equal to the minimum, it procedes to the truncate
    // instruction, which will handle such an input the same way irrespective of NAN2008.
    // Otherwise the input is compared to itself to determine whether it is a NaN or not
    // in order to return either zero or the minimum value.
    //
    // TODO: simplify this when the emulator correctly implements NAN2008=1 behavior of the
    // truncate instruction for MIPS64R6.
    if (input_type == Primitive::kPrimFloat) {
      uint32_t min_val = (result_type == Primitive::kPrimLong)
          ? bit_cast<uint32_t, float>(std::numeric_limits<int64_t>::min())
          : bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
      __ LoadConst32(TMP, min_val);
      __ Mtc1(TMP, FTMP);
      __ CmpLeS(FTMP, FTMP, src);
    } else {
      uint64_t min_val = (result_type == Primitive::kPrimLong)
          ? bit_cast<uint64_t, double>(std::numeric_limits<int64_t>::min())
          : bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
      __ LoadConst64(TMP, min_val);
      __ Dmtc1(TMP, FTMP);
      __ CmpLeD(FTMP, FTMP, src);
    }

    // In range (min <= src): the truncate instruction handles it for both NAN2008 modes.
    __ Bc1nez(FTMP, &truncate);

    // Out-of-range or NaN path: FTMP gets all-ones if src == src (i.e. not NaN),
    // all-zeros if src is NaN; AND-ing with the preloaded minimum yields
    // min for the too-small case and 0 for NaN.
    if (input_type == Primitive::kPrimFloat) {
      __ CmpEqS(FTMP, src, src);
    } else {
      __ CmpEqD(FTMP, src, src);
    }
    if (result_type == Primitive::kPrimLong) {
      __ LoadConst64(dst, std::numeric_limits<int64_t>::min());
    } else {
      __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
    }
    __ Mfc1(TMP, FTMP);
    __ And(dst, dst, TMP);

    __ Bc(&done);

    __ Bind(&truncate);

    if (result_type == Primitive::kPrimLong) {
      if (input_type == Primitive::kPrimFloat) {
        __ TruncLS(FTMP, src);
      } else {
        __ TruncLD(FTMP, src);
      }
      __ Dmfc1(dst, FTMP);
    } else {
      if (input_type == Primitive::kPrimFloat) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);
    }

    __ Bind(&done);
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    // float <-> double: a single conversion instruction.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    if (result_type == Primitive::kPrimFloat) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
4417
// Unsigned shift right: both location setup and code generation are shared
// with the other shift operations via HandleShift().
void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

// Bitwise XOR: shares the generic two-operand handling via HandleBinaryOp().
void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}
4433
// HBoundType instructions must not survive to code generation: they are
// expected to be eliminated during the prepare-for-register-allocation pass,
// so reaching either visitor is a compiler bug.
void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
4443
// All condition visitors (signed: Equal..GreaterThanOrEqual; and
// Below/BelowOrEqual/Above/AboveOrEqual — presumably the unsigned variants,
// per their HIR names) delegate to the shared HandleCondition() helper for
// both location setup and code generation.
void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
4523
// Packed switch: only the switch value needs a register. The generator
// (below) emits either cascaded compare/jumps or a jump table, using the
// fixed scratch registers (TMP/AT), so no further locations are required.
void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}
4530
// Emits a packed switch as a chain of compares and branches.
//
// TMP holds a running difference: it starts as (value - lower_bound) and is
// decremented as cases are handled, so every case test is a cheap
// compare-with-zero (Bltzc/Beqzc). Two consecutive cases are dispatched per
// loop iteration.
//
// value_reg:     register holding the switch value.
// lower_bound:   smallest case value (HPackedSwitch cases are contiguous).
// num_entries:   number of case entries.
// switch_block:  block containing the HPackedSwitch; its successors are the
//                case targets, in order.
// default_block: target when the value matches no case.
void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
                                                                 int32_t lower_bound,
                                                                 uint32_t num_entries,
                                                                 HBasicBlock* switch_block,
                                                                 HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
4566
// Emits a packed switch via a jump table.
//
// The table stores 32-bit offsets of the case targets relative to the table's
// start address. After a single unsigned bounds check covering both the
// below-lower_bound and above-range cases, the code loads the offset at index
// (value - lower_bound), adds the table's address back in, and jumps.
// TMP and AT serve as scratch registers throughout.
//
// Parameters are the same as for GenPackedSwitchWithCompares above.
void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<Mips64Label*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  __ Addiu32(TMP, value_reg, -lower_bound);
  __ LoadConst32(AT, num_entries);
  __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, table->GetLabel());
  __ Sll(TMP, TMP, 2);  // Scale the index by 4 (32-bit table entries).
  __ Daddu(TMP, TMP, AT);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Daddu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ Nop();  // Branch delay slot.
}
4598
4599void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4600 int32_t lower_bound = switch_instr->GetStartValue();
4601 uint32_t num_entries = switch_instr->GetNumEntries();
4602 LocationSummary* locations = switch_instr->GetLocations();
4603 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
4604 HBasicBlock* switch_block = switch_instr->GetBlock();
4605 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
4606
4607 if (num_entries > kPackedSwitchJumpTableThreshold) {
4608 GenTableBasedPackedSwitch(value_reg,
4609 lower_bound,
4610 num_entries,
4611 switch_block,
4612 default_block);
4613 } else {
4614 GenPackedSwitchWithCompares(value_reg,
4615 lower_bound,
4616 num_entries,
4617 switch_block,
4618 default_block);
4619 }
4620}
4621
// HClassTableGet has no MIPS64 implementation yet; both visitors abort
// with UNIMPLEMENTED(FATAL) so an accidental use fails loudly.
void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet*) {
  UNIMPLEMENTED(FATAL) << "ClassTableGet is unimplemented on mips64";
}

void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet*) {
  UNIMPLEMENTED(FATAL) << "ClassTableGet is unimplemented on mips64";
}
4629
Alexey Frunze4dda3372015-06-01 18:31:49 -07004630} // namespace mips64
4631} // namespace art