/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_mips64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_mips64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/mips64/assembler_mips64.h"
#include "utils/stack_checks.h"

namespace art {
namespace mips64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr GpuRegister kMethodRegisterArgument = A0;

Location Mips64ReturnLocation(Primitive::Type return_type) {
  switch (return_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(V0);

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      return Location::FpuRegisterLocation(F0);

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
}

Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(Primitive::Type type) const {
  return Mips64ReturnLocation(type);
}

Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}

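// Argument registers are handed out in lockstep: taking a floating-point register also consumes
// the matching GPR slot and vice versa (hence both indices advance below), mirroring the MIPS64
// N64 argument-passing convention.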
Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unexpected parameter type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = Location::FpuRegisterLocation(
        calling_convention.GetFpuRegisterAt(float_index_++));
    gp_index_++;
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
    float_index_++;
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;

  // TODO: shouldn't we use a whole machine word per argument on the stack?
  // Implicit 4-byte method pointer (and such) will cause misalignment.

  return next_location;
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
  return Mips64ReturnLocation(type);
}

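// The `__` shorthand makes code-generation code read like an assembly listing; here it expands
// to a call on the enclosing code generator's assembler.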
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()

class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimInt,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
};

class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
};

class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  LoadClassSlowPathMIPS64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCodeMIPS64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ LoadConst32(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex().index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips64_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = at_->GetType();
      mips64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
    }

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};

class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit LoadStringSlowPathMIPS64(HLoadString* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HLoadString* load = instruction_->AsLoadString();
    const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex().index_;
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index);
    mips64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    mips64_codegen->MoveLocation(locations->Out(),
                                 calling_convention.GetReturnLocation(type),
                                 type);

    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    // TODO: Change art_quick_resolve_string to kSaveEverything and use a temporary for the
    // .bss entry address in the fast path, so that we can avoid another calculation here.
    GpuRegister out = locations->Out().AsRegister<GpuRegister>();
    DCHECK_NE(out, AT);
    CodeGeneratorMIPS64::PcRelativePatchInfo* info =
        mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
    mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info, AT);
    __ Sw(out, AT, /* placeholder */ 0x5678);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
};

class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
};

class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ Bc(GetReturnLabel());
    } else {
      __ Bc(mips64_codegen->GetLabelOf(successor_));
    }
  }

  Mips64Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Mips64Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
};

class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};

class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
};

CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const Mips64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_patches_(MethodReferenceComparator(),
                      graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      call_patches_(MethodReferenceComparator(),
                    graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_address_patches_(std::less<uint32_t>(),
                                  graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<Mips64Assembler*>(GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()

void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position = stack_map_stream_.GetStackMap(i).native_pc_offset;
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    DCHECK_GE(new_position, old_position);
    stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  CodeGenerator::Finalize(allocator);
}

Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}

void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
}

void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
}

void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
  // Pop reg
  __ Ld(GpuRegister(reg), SP, 0);
  __ DecreaseFrameSize(kMips64DoublewordSize);
}

void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
  // Push reg
  __ IncreaseFrameSize(kMips64DoublewordSize);
  __ Sd(GpuRegister(reg), SP, 0);
}

void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
  LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
  StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
  __ LoadFromOffset(load_type,
                    GpuRegister(ensure_scratch.GetRegister()),
                    SP,
                    index1 + stack_offset);
  __ LoadFromOffset(load_type,
                    TMP,
                    SP,
                    index2 + stack_offset);
  __ StoreToOffset(store_type,
                   GpuRegister(ensure_scratch.GetRegister()),
                   SP,
                   index2 + stack_offset);
  __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
}

static dwarf::Reg DWARFReg(GpuRegister reg) {
  return dwarf::Reg::Mips64Core(static_cast<int>(reg));
}

static dwarf::Reg DWARFReg(FpuRegister reg) {
  return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
}

void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips64) || !IsLeafMethod();

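  // Implicit stack-overflow check: probe (load from) the lowest address the frame may touch.
  // If that address falls into the stack guard area, the resulting fault is expected to be
  // turned into a StackOverflowError by the runtime's fault handler.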
  if (do_overflow_check) {
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large. Per the various APIs
  // it looks like it should always be less than 2GB in size, which allows
  // us using 32-bit signed offsets from the stack pointer.
  if (GetFrameSize() > 0x7FFFFFFF)
    LOG(FATAL) << "Stack frame larger than 2GB";

  // Spill callee-saved registers.
  // Note that their cumulative size is small and they can be indexed using
  // 16-bit offsets.

  // TODO: increment/decrement SP in one step instead of two or remove this comment.

  uint32_t ofs = FrameEntrySpillSize();
  __ IncreaseFrameSize(ofs);

  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ Sd(reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ Sdc1(reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Allocate the rest of the frame and store the current method pointer
  // at its end.

  __ IncreaseFrameSize(GetFrameSize() - FrameEntrySpillSize());

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    static_assert(IsInt<16>(kCurrentMethodStackOffset),
                  "kCurrentMethodStackOffset must fit into int16_t");
    __ Sd(kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}

void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Deallocate the rest of the frame.

    __ DecreaseFrameSize(GetFrameSize() - FrameEntrySpillSize());

    // Restore callee-saved registers.
    // Note that their cumulative size is small and they can be indexed using
    // 16-bit offsets.

    // TODO: increment/decrement SP in one step instead of two or remove this comment.

    uint32_t ofs = 0;

    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        __ Ldc1(reg, SP, ofs);
        ofs += kMips64DoublewordSize;
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (size_t i = 0; i < arraysize(kCoreCalleeSaves); ++i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        __ Ld(reg, SP, ofs);
        ofs += kMips64DoublewordSize;
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    DCHECK_EQ(ofs, FrameEntrySpillSize());
    __ DecreaseFrameSize(ofs);
  }

  __ Jr(RA);
  __ Nop();

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}

void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorMIPS64::MoveLocation(Location destination,
                                       Location source,
                                       Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);
  DCHECK_EQ(unspecified_type, false);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 64bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      // Move to GPR/FPR from stack
      LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
      if (Primitive::IsFloatingPointType(dst_type)) {
        __ LoadFpuFromOffset(load_type,
                             destination.AsFpuRegister<FpuRegister>(),
                             SP,
                             source.GetStackIndex());
      } else {
        // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
        __ LoadFromOffset(load_type,
                          destination.AsRegister<GpuRegister>(),
                          SP,
                          source.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to GPR/FPR from constant
      GpuRegister gpr = AT;
      if (!Primitive::IsFloatingPointType(dst_type)) {
        gpr = destination.AsRegister<GpuRegister>();
      }
      if (dst_type == Primitive::kPrimInt || dst_type == Primitive::kPrimFloat) {
        int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst32(gpr, value);
        }
      } else {
        int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst64(gpr, value);
        }
      }
      if (dst_type == Primitive::kPrimFloat) {
        __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      } else if (dst_type == Primitive::kPrimDouble) {
        __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      }
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        // Move to GPR from GPR
        __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
      } else {
        DCHECK(destination.IsFpuRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        } else {
          __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        }
      }
    } else if (source.IsFpuRegister()) {
      if (destination.IsFpuRegister()) {
        // Move to FPR from FPR
        if (dst_type == Primitive::kPrimFloat) {
          __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          DCHECK_EQ(dst_type, Primitive::kPrimDouble);
          __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      } else {
        DCHECK(destination.IsRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      }
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
        } else {
          dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
      // Move to stack from GPR/FPR
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      if (source.IsRegister()) {
        __ StoreToOffset(store_type,
                         source.AsRegister<GpuRegister>(),
                         SP,
                         destination.GetStackIndex());
      } else {
        __ StoreFpuToOffset(store_type,
                            source.AsFpuRegister<FpuRegister>(),
                            SP,
                            destination.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to stack from constant
      HConstant* src_cst = source.GetConstant();
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      GpuRegister gpr = ZERO;
      if (destination.IsStackSlot()) {
        int32_t value = GetInt32ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst32(gpr, value);
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        int64_t value = GetInt64ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst64(gpr, value);
        }
      }
      __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
      // Move to stack from stack
      if (destination.IsStackSlot()) {
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
      } else {
        __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
      }
    }
  }
}

void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, Primitive::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs
    FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
    FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, r1);
      __ MovS(r1, r2);
      __ MovS(r2, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, r1);
      __ MovD(r1, r2);
      __ MovD(r2, FTMP);
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}

void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ LoadConst32(location.AsRegister<GpuRegister>(), value);
}

void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

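// Write barrier: mark the card covering `object` as dirty in the card table. The value loaded
// from the thread is the biased card table base; its low byte is the dirty marker, so the same
// register serves both as the table base and as the byte stored into the card.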
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    __ Beqzc(value, &done);
  }
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}

template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.pc_rel_label.IsBound());
    uint32_t pc_rel_offset = __ GetLabelLocation(&info.pc_rel_label);
    linker_patches->push_back(Factory(pc_rel_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}

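// Collect every patch recorded while generating code (method/call literals and the PC-relative
// dex cache, call, string and type patches) into `linker_patches` for later resolution.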
void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      method_patches_.size() +
      call_patches_.size() +
      pc_relative_dex_cache_patches_.size() +
      relative_call_patches_.size() +
      pc_relative_string_patches_.size() +
      pc_relative_type_patches_.size() +
      boot_image_string_patches_.size() +
      boot_image_type_patches_.size() +
      boot_image_address_patches_.size();
  linker_patches->reserve(size);
  for (const auto& entry : method_patches_) {
    const MethodReference& target_method = entry.first;
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
                                                       target_method.dex_file,
                                                       target_method.dex_method_index));
  }
  for (const auto& entry : call_patches_) {
    const MethodReference& target_method = entry.first;
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::CodePatch(literal_offset,
                                                     target_method.dex_file,
                                                     target_method.dex_method_index));
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                               linker_patches);
  for (const PcRelativePatchInfo& info : relative_call_patches_) {
    const DexFile& dex_file = info.target_dex_file;
    uint32_t method_index = info.offset_or_index;
    DCHECK(info.pc_rel_label.IsBound());
    uint32_t pc_rel_offset = __ GetLabelLocation(&info.pc_rel_label);
    linker_patches->push_back(
        LinkerPatch::RelativeCodePatch(pc_rel_offset, &dex_file, method_index));
  }
  if (!GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
                                                              linker_patches);
  for (const auto& entry : boot_image_string_patches_) {
    const StringReference& target_string = entry.first;
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::StringPatch(literal_offset,
                                                       target_string.dex_file,
                                                       target_string.string_index.index_));
  }
  for (const auto& entry : boot_image_type_patches_) {
    const TypeReference& target_type = entry.first;
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::TypePatch(literal_offset,
                                                     target_type.dex_file,
                                                     target_type.type_index.index_));
  }
  for (const auto& entry : boot_image_address_patches_) {
    DCHECK(GetCompilerOptions().GetIncludePatchInformation());
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
  }
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStringPatch(
    const DexFile& dex_file, uint32_t string_index) {
  return NewPcRelativePatch(dex_file, string_index, &pc_relative_string_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeTypePatch(
    const DexFile& dex_file, dex::TypeIndex type_index) {
  return NewPcRelativePatch(dex_file, type_index.index_, &pc_relative_type_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeDexCacheArrayPatch(
    const DexFile& dex_file, uint32_t element_offset) {
  return NewPcRelativePatch(dex_file, element_offset, &pc_relative_dex_cache_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeCallPatch(
    const DexFile& dex_file, uint32_t method_index) {
  return NewPcRelativePatch(dex_file, method_index, &relative_call_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
    const DexFile& dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
  patches->emplace_back(dex_file, offset_or_index);
  return &patches->back();
}

Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint32_t>(value); });
}

Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
  return uint64_literals_.GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint64_t>(value); });
}

Literal* CodeGeneratorMIPS64::DeduplicateMethodLiteral(MethodReference target_method,
                                                       MethodToLiteralMap* map) {
  return map->GetOrCreate(
      target_method,
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

Literal* CodeGeneratorMIPS64::DeduplicateMethodAddressLiteral(MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &method_patches_);
}

Literal* CodeGeneratorMIPS64::DeduplicateMethodCodeLiteral(MethodReference target_method) {
  return DeduplicateMethodLiteral(target_method, &call_patches_);
}

Literal* CodeGeneratorMIPS64::DeduplicateBootImageStringLiteral(const DexFile& dex_file,
                                                                dex::StringIndex string_index) {
  return boot_image_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

Literal* CodeGeneratorMIPS64::DeduplicateBootImageTypeLiteral(const DexFile& dex_file,
                                                              dex::TypeIndex type_index) {
  return boot_image_type_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
  bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
  Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
  return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
}

void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info,
                                                               GpuRegister out) {
  __ Bind(&info->pc_rel_label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // The immediately following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
}

void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
  // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
  blocked_core_registers_[ZERO] = true;
  blocked_core_registers_[K0] = true;
  blocked_core_registers_[K1] = true;
  blocked_core_registers_[GP] = true;
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[RA] = true;

  // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
  // registers (similar to how AT is used by MIPS assemblers).
  blocked_core_registers_[AT] = true;
  blocked_core_registers_[TMP] = true;
  blocked_core_registers_[TMP2] = true;
  blocked_fpu_registers_[FTMP] = true;

  // Reserve suspend and thread registers.
  blocked_core_registers_[S0] = true;
  blocked_core_registers_[TR] = true;

  // Reserve T9 for function calls
  blocked_core_registers_[T9] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}

size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreFpuToOffset(kStoreDoubleword, FpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFpuFromOffset(kLoadDoubleword, FpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << GpuRegister(reg);
}

void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FpuRegister(reg);
}

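// Call into a Quick runtime entrypoint: load its address from the Thread object and call through
// T9 (with a nop filling the delay slot), recording a stack map when the entrypoint requires one.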
Calin Juravle175dc732015-08-25 15:42:32 +01001144void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
Alexey Frunze4dda3372015-06-01 18:31:49 -07001145 HInstruction* instruction,
1146 uint32_t dex_pc,
1147 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001148 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
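  // The entrypoint address is loaded from the Thread object (TR) and called indirectly
  // through T9, the register MIPS PIC calling conventions expect to hold the callee address.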
Serban Constantinescufc734082016-07-19 17:18:07 +01001149 __ LoadFromOffset(kLoadDoubleword,
1150 T9,
1151 TR,
1152 GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001153 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001154 __ Nop();
Serban Constantinescufc734082016-07-19 17:18:07 +01001155 if (EntrypointRequiresStackMap(entrypoint)) {
1156 RecordPcInfo(instruction, dex_pc, slow_path);
1157 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001158}
1159
1160void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
1161 GpuRegister class_reg) {
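  // Jump to the slow path if the class status is still below kStatusInitialized.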
1162 __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
1163 __ LoadConst32(AT, mirror::Class::kStatusInitialized);
1164 __ Bltc(TMP, AT, slow_path->GetEntryLabel());
1165 // TODO: barrier needed?
1166 __ Bind(slow_path->GetExitLabel());
1167}
1168
1169void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
1170 __ Sync(0); // Only stype 0 is supported.
1171}
1172
1173void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
1174 HBasicBlock* successor) {
1175 SuspendCheckSlowPathMIPS64* slow_path =
1176 new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS64(instruction, successor);
1177 codegen_->AddSlowPath(slow_path);
1178
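  // TMP <- the 16-bit thread flags; any non-zero value means a suspend or checkpoint
  // request is pending.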
1179 __ LoadFromOffset(kLoadUnsignedHalfword,
1180 TMP,
1181 TR,
Andreas Gampe542451c2016-07-26 09:02:02 -07001182 Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001183 if (successor == nullptr) {
1184 __ Bnezc(TMP, slow_path->GetEntryLabel());
1185 __ Bind(slow_path->GetReturnLabel());
1186 } else {
1187 __ Beqzc(TMP, codegen_->GetLabelOf(successor));
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001188 __ Bc(slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001189 // slow_path will return to GetLabelOf(successor).
1190 }
1191}
1192
1193InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
1194 CodeGeneratorMIPS64* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001195 : InstructionCodeGenerator(graph, codegen),
Alexey Frunze4dda3372015-06-01 18:31:49 -07001196 assembler_(codegen->GetAssembler()),
1197 codegen_(codegen) {}
1198
1199void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
1200 DCHECK_EQ(instruction->InputCount(), 2U);
1201 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1202 Primitive::Type type = instruction->GetResultType();
1203 switch (type) {
1204 case Primitive::kPrimInt:
1205 case Primitive::kPrimLong: {
1206 locations->SetInAt(0, Location::RequiresRegister());
1207 HInstruction* right = instruction->InputAt(1);
1208 bool can_use_imm = false;
1209 if (right->IsConstant()) {
1210 int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
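        // andi/ori/xori zero-extend their 16-bit immediate, while addiu/daddiu sign-extend
        // theirs; a subtraction by a constant is emitted as an addition of its negation.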
1211 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
1212 can_use_imm = IsUint<16>(imm);
1213 } else if (instruction->IsAdd()) {
1214 can_use_imm = IsInt<16>(imm);
1215 } else {
1216 DCHECK(instruction->IsSub());
1217 can_use_imm = IsInt<16>(-imm);
1218 }
1219 }
1220 if (can_use_imm)
1221 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1222 else
1223 locations->SetInAt(1, Location::RequiresRegister());
1224 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1225 }
1226 break;
1227
1228 case Primitive::kPrimFloat:
1229 case Primitive::kPrimDouble:
1230 locations->SetInAt(0, Location::RequiresFpuRegister());
1231 locations->SetInAt(1, Location::RequiresFpuRegister());
1232 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1233 break;
1234
1235 default:
1236 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
1237 }
1238}
1239
1240void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
1241 Primitive::Type type = instruction->GetType();
1242 LocationSummary* locations = instruction->GetLocations();
1243
1244 switch (type) {
1245 case Primitive::kPrimInt:
1246 case Primitive::kPrimLong: {
1247 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
1248 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
1249 Location rhs_location = locations->InAt(1);
1250
1251 GpuRegister rhs_reg = ZERO;
1252 int64_t rhs_imm = 0;
1253 bool use_imm = rhs_location.IsConstant();
1254 if (use_imm) {
1255 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
1256 } else {
1257 rhs_reg = rhs_location.AsRegister<GpuRegister>();
1258 }
1259
1260 if (instruction->IsAnd()) {
1261 if (use_imm)
1262 __ Andi(dst, lhs, rhs_imm);
1263 else
1264 __ And(dst, lhs, rhs_reg);
1265 } else if (instruction->IsOr()) {
1266 if (use_imm)
1267 __ Ori(dst, lhs, rhs_imm);
1268 else
1269 __ Or(dst, lhs, rhs_reg);
1270 } else if (instruction->IsXor()) {
1271 if (use_imm)
1272 __ Xori(dst, lhs, rhs_imm);
1273 else
1274 __ Xor(dst, lhs, rhs_reg);
1275 } else if (instruction->IsAdd()) {
1276 if (type == Primitive::kPrimInt) {
1277 if (use_imm)
1278 __ Addiu(dst, lhs, rhs_imm);
1279 else
1280 __ Addu(dst, lhs, rhs_reg);
1281 } else {
1282 if (use_imm)
1283 __ Daddiu(dst, lhs, rhs_imm);
1284 else
1285 __ Daddu(dst, lhs, rhs_reg);
1286 }
1287 } else {
1288 DCHECK(instruction->IsSub());
1289 if (type == Primitive::kPrimInt) {
1290 if (use_imm)
1291 __ Addiu(dst, lhs, -rhs_imm);
1292 else
1293 __ Subu(dst, lhs, rhs_reg);
1294 } else {
1295 if (use_imm)
1296 __ Daddiu(dst, lhs, -rhs_imm);
1297 else
1298 __ Dsubu(dst, lhs, rhs_reg);
1299 }
1300 }
1301 break;
1302 }
1303 case Primitive::kPrimFloat:
1304 case Primitive::kPrimDouble: {
1305 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
1306 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
1307 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
1308 if (instruction->IsAdd()) {
1309 if (type == Primitive::kPrimFloat)
1310 __ AddS(dst, lhs, rhs);
1311 else
1312 __ AddD(dst, lhs, rhs);
1313 } else if (instruction->IsSub()) {
1314 if (type == Primitive::kPrimFloat)
1315 __ SubS(dst, lhs, rhs);
1316 else
1317 __ SubD(dst, lhs, rhs);
1318 } else {
1319 LOG(FATAL) << "Unexpected floating-point binary operation";
1320 }
1321 break;
1322 }
1323 default:
1324 LOG(FATAL) << "Unexpected binary operation type " << type;
1325 }
1326}
1327
1328void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08001329 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001330
1331 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
1332 Primitive::Type type = instr->GetResultType();
1333 switch (type) {
1334 case Primitive::kPrimInt:
1335 case Primitive::kPrimLong: {
1336 locations->SetInAt(0, Location::RequiresRegister());
1337 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001338 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001339 break;
1340 }
1341 default:
1342 LOG(FATAL) << "Unexpected shift type " << type;
1343 }
1344}
1345
1346void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08001347 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001348 LocationSummary* locations = instr->GetLocations();
1349 Primitive::Type type = instr->GetType();
1350
1351 switch (type) {
1352 case Primitive::kPrimInt:
1353 case Primitive::kPrimLong: {
1354 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
1355 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
1356 Location rhs_location = locations->InAt(1);
1357
1358 GpuRegister rhs_reg = ZERO;
1359 int64_t rhs_imm = 0;
1360 bool use_imm = rhs_location.IsConstant();
1361 if (use_imm) {
1362 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
1363 } else {
1364 rhs_reg = rhs_location.AsRegister<GpuRegister>();
1365 }
1366
1367 if (use_imm) {
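      // Only the low 5 (int) or 6 (long) bits of the shift amount are significant,
      // matching Java shift semantics.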
Roland Levillain5b5b9312016-03-22 14:57:31 +00001368 uint32_t shift_value = rhs_imm &
1369 (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001370
Alexey Frunze92d90602015-12-18 18:16:36 -08001371 if (shift_value == 0) {
1372 if (dst != lhs) {
1373 __ Move(dst, lhs);
1374 }
1375 } else if (type == Primitive::kPrimInt) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001376 if (instr->IsShl()) {
1377 __ Sll(dst, lhs, shift_value);
1378 } else if (instr->IsShr()) {
1379 __ Sra(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001380 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001381 __ Srl(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001382 } else {
1383 __ Rotr(dst, lhs, shift_value);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001384 }
1385 } else {
1386 if (shift_value < 32) {
1387 if (instr->IsShl()) {
1388 __ Dsll(dst, lhs, shift_value);
1389 } else if (instr->IsShr()) {
1390 __ Dsra(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001391 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001392 __ Dsrl(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001393 } else {
1394 __ Drotr(dst, lhs, shift_value);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001395 }
1396 } else {
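          // Shift amounts of 32..63 use the *32 instruction forms, which implicitly add 32
          // to the encoded 5-bit shift amount.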
1397 shift_value -= 32;
1398 if (instr->IsShl()) {
1399 __ Dsll32(dst, lhs, shift_value);
1400 } else if (instr->IsShr()) {
1401 __ Dsra32(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001402 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001403 __ Dsrl32(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08001404 } else {
1405 __ Drotr32(dst, lhs, shift_value);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001406 }
1407 }
1408 }
1409 } else {
1410 if (type == Primitive::kPrimInt) {
1411 if (instr->IsShl()) {
1412 __ Sllv(dst, lhs, rhs_reg);
1413 } else if (instr->IsShr()) {
1414 __ Srav(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08001415 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001416 __ Srlv(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08001417 } else {
1418 __ Rotrv(dst, lhs, rhs_reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001419 }
1420 } else {
1421 if (instr->IsShl()) {
1422 __ Dsllv(dst, lhs, rhs_reg);
1423 } else if (instr->IsShr()) {
1424 __ Dsrav(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08001425 } else if (instr->IsUShr()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001426 __ Dsrlv(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08001427 } else {
1428 __ Drotrv(dst, lhs, rhs_reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001429 }
1430 }
1431 }
1432 break;
1433 }
1434 default:
1435 LOG(FATAL) << "Unexpected shift operation type " << type;
1436 }
1437}
1438
1439void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
1440 HandleBinaryOp(instruction);
1441}
1442
1443void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
1444 HandleBinaryOp(instruction);
1445}
1446
1447void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
1448 HandleBinaryOp(instruction);
1449}
1450
1451void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
1452 HandleBinaryOp(instruction);
1453}
1454
1455void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
1456 LocationSummary* locations =
1457 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1458 locations->SetInAt(0, Location::RequiresRegister());
1459 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1460 if (Primitive::IsFloatingPointType(instruction->GetType())) {
1461 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1462 } else {
1463 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1464 }
1465}
1466
1467void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
1468 LocationSummary* locations = instruction->GetLocations();
1469 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
1470 Location index = locations->InAt(1);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01001471 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001472
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01001473 Primitive::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001474 switch (type) {
1475 case Primitive::kPrimBoolean: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001476 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1477 if (index.IsConstant()) {
1478 size_t offset =
1479 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1480 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
1481 } else {
1482 __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
1483 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
1484 }
1485 break;
1486 }
1487
1488 case Primitive::kPrimByte: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001489 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1490 if (index.IsConstant()) {
1491 size_t offset =
1492 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1493 __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
1494 } else {
1495 __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
1496 __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset);
1497 }
1498 break;
1499 }
1500
1501 case Primitive::kPrimShort: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001502 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1503 if (index.IsConstant()) {
1504 size_t offset =
1505 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
1506 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
1507 } else {
1508 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
1509 __ Daddu(TMP, obj, TMP);
1510 __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset);
1511 }
1512 break;
1513 }
1514
1515 case Primitive::kPrimChar: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001516 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1517 if (index.IsConstant()) {
1518 size_t offset =
1519 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
1520 __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
1521 } else {
1522 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
1523 __ Daddu(TMP, obj, TMP);
1524 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
1525 }
1526 break;
1527 }
1528
1529 case Primitive::kPrimInt:
1530 case Primitive::kPrimNot: {
1531 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001532 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
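      // Heap references are 32 bits wide and are loaded zero-extended (lwu); ints use the
      // sign-extending lw.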
1533 LoadOperandType load_type = (type == Primitive::kPrimNot) ? kLoadUnsignedWord : kLoadWord;
1534 if (index.IsConstant()) {
1535 size_t offset =
1536 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1537 __ LoadFromOffset(load_type, out, obj, offset);
1538 } else {
1539 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
1540 __ Daddu(TMP, obj, TMP);
1541 __ LoadFromOffset(load_type, out, TMP, data_offset);
1542 }
1543 break;
1544 }
1545
1546 case Primitive::kPrimLong: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001547 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1548 if (index.IsConstant()) {
1549 size_t offset =
1550 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1551 __ LoadFromOffset(kLoadDoubleword, out, obj, offset);
1552 } else {
1553 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
1554 __ Daddu(TMP, obj, TMP);
1555 __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset);
1556 }
1557 break;
1558 }
1559
1560 case Primitive::kPrimFloat: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001561 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
1562 if (index.IsConstant()) {
1563 size_t offset =
1564 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1565 __ LoadFpuFromOffset(kLoadWord, out, obj, offset);
1566 } else {
1567 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
1568 __ Daddu(TMP, obj, TMP);
1569 __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset);
1570 }
1571 break;
1572 }
1573
1574 case Primitive::kPrimDouble: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001575 FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
1576 if (index.IsConstant()) {
1577 size_t offset =
1578 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1579 __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset);
1580 } else {
1581 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
1582 __ Daddu(TMP, obj, TMP);
1583 __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset);
1584 }
1585 break;
1586 }
1587
1588 case Primitive::kPrimVoid:
1589 LOG(FATAL) << "Unreachable type " << instruction->GetType();
1590 UNREACHABLE();
1591 }
1592 codegen_->MaybeRecordImplicitNullCheck(instruction);
1593}
1594
1595void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
1596 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1597 locations->SetInAt(0, Location::RequiresRegister());
1598 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1599}
1600
1601void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
1602 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01001603 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001604 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
1605 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
1606 __ LoadFromOffset(kLoadWord, out, obj, offset);
1607 codegen_->MaybeRecordImplicitNullCheck(instruction);
1608}
1609
1610void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
David Brazdilbb3d5052015-09-21 18:39:16 +01001611 bool needs_runtime_call = instruction->NeedsTypeCheck();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001612 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1613 instruction,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01001614 needs_runtime_call ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);
David Brazdilbb3d5052015-09-21 18:39:16 +01001615 if (needs_runtime_call) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001616 InvokeRuntimeCallingConvention calling_convention;
1617 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1618 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1619 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1620 } else {
1621 locations->SetInAt(0, Location::RequiresRegister());
1622 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1623 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
1624 locations->SetInAt(2, Location::RequiresFpuRegister());
1625 } else {
1626 locations->SetInAt(2, Location::RequiresRegister());
1627 }
1628 }
1629}
1630
1631void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
1632 LocationSummary* locations = instruction->GetLocations();
1633 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
1634 Location index = locations->InAt(1);
1635 Primitive::Type value_type = instruction->GetComponentType();
1636 bool needs_runtime_call = locations->WillCall();
1637 bool needs_write_barrier =
1638 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
1639
1640 switch (value_type) {
1641 case Primitive::kPrimBoolean:
1642 case Primitive::kPrimByte: {
1643 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
1644 GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
1645 if (index.IsConstant()) {
1646 size_t offset =
1647 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1648 __ StoreToOffset(kStoreByte, value, obj, offset);
1649 } else {
1650 __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
1651 __ StoreToOffset(kStoreByte, value, TMP, data_offset);
1652 }
1653 break;
1654 }
1655
1656 case Primitive::kPrimShort:
1657 case Primitive::kPrimChar: {
1658 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
1659 GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
1660 if (index.IsConstant()) {
1661 size_t offset =
1662 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
1663 __ StoreToOffset(kStoreHalfword, value, obj, offset);
1664 } else {
1665 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_2);
1666 __ Daddu(TMP, obj, TMP);
1667 __ StoreToOffset(kStoreHalfword, value, TMP, data_offset);
1668 }
1669 break;
1670 }
1671
1672 case Primitive::kPrimInt:
1673 case Primitive::kPrimNot: {
1674 if (!needs_runtime_call) {
1675 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
1676 GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
1677 if (index.IsConstant()) {
1678 size_t offset =
1679 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1680 __ StoreToOffset(kStoreWord, value, obj, offset);
1681 } else {
1682 DCHECK(index.IsRegister()) << index;
1683 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
1684 __ Daddu(TMP, obj, TMP);
1685 __ StoreToOffset(kStoreWord, value, TMP, data_offset);
1686 }
1687 codegen_->MaybeRecordImplicitNullCheck(instruction);
1688 if (needs_write_barrier) {
1689 DCHECK_EQ(value_type, Primitive::kPrimNot);
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01001690 codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001691 }
1692 } else {
1693 DCHECK_EQ(value_type, Primitive::kPrimNot);
Serban Constantinescufc734082016-07-19 17:18:07 +01001694 codegen_->InvokeRuntime(kQuickAputObject, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00001695 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001696 }
1697 break;
1698 }
1699
1700 case Primitive::kPrimLong: {
1701 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
1702 GpuRegister value = locations->InAt(2).AsRegister<GpuRegister>();
1703 if (index.IsConstant()) {
1704 size_t offset =
1705 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1706 __ StoreToOffset(kStoreDoubleword, value, obj, offset);
1707 } else {
1708 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
1709 __ Daddu(TMP, obj, TMP);
1710 __ StoreToOffset(kStoreDoubleword, value, TMP, data_offset);
1711 }
1712 break;
1713 }
1714
1715 case Primitive::kPrimFloat: {
1716 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
1717 FpuRegister value = locations->InAt(2).AsFpuRegister<FpuRegister>();
1718 DCHECK(locations->InAt(2).IsFpuRegister());
1719 if (index.IsConstant()) {
1720 size_t offset =
1721 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1722 __ StoreFpuToOffset(kStoreWord, value, obj, offset);
1723 } else {
1724 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_4);
1725 __ Daddu(TMP, obj, TMP);
1726 __ StoreFpuToOffset(kStoreWord, value, TMP, data_offset);
1727 }
1728 break;
1729 }
1730
1731 case Primitive::kPrimDouble: {
1732 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
1733 FpuRegister value = locations->InAt(2).AsFpuRegister<FpuRegister>();
1734 DCHECK(locations->InAt(2).IsFpuRegister());
1735 if (index.IsConstant()) {
1736 size_t offset =
1737 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1738 __ StoreFpuToOffset(kStoreDoubleword, value, obj, offset);
1739 } else {
1740 __ Dsll(TMP, index.AsRegister<GpuRegister>(), TIMES_8);
1741 __ Daddu(TMP, obj, TMP);
1742 __ StoreFpuToOffset(kStoreDoubleword, value, TMP, data_offset);
1743 }
1744 break;
1745 }
1746
1747 case Primitive::kPrimVoid:
1748 LOG(FATAL) << "Unreachable type " << instruction->GetType();
1749 UNREACHABLE();
1750 }
1751
1752 // Ints and objects already recorded the implicit null check in the switch above;
1753 // record it here for the remaining types.
1753 if (value_type != Primitive::kPrimInt && value_type != Primitive::kPrimNot) {
1754 codegen_->MaybeRecordImplicitNullCheck(instruction);
1755 }
1756}
1757
1758void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001759 RegisterSet caller_saves = RegisterSet::Empty();
1760 InvokeRuntimeCallingConvention calling_convention;
1761 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1762 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1763 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001764 locations->SetInAt(0, Location::RequiresRegister());
1765 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001766}
1767
1768void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
1769 LocationSummary* locations = instruction->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01001770 BoundsCheckSlowPathMIPS64* slow_path =
1771 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001772 codegen_->AddSlowPath(slow_path);
1773
1774 GpuRegister index = locations->InAt(0).AsRegister<GpuRegister>();
1775 GpuRegister length = locations->InAt(1).AsRegister<GpuRegister>();
1776
1777 // length is limited by the maximum positive signed 32-bit integer.
1778 // Unsigned comparison of length and index checks for index < 0
1779 // and for length <= index simultaneously.
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001780 __ Bgeuc(index, length, slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001781}
1782
1783void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
1784 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1785 instruction,
1786 LocationSummary::kCallOnSlowPath);
1787 locations->SetInAt(0, Location::RequiresRegister());
1788 locations->SetInAt(1, Location::RequiresRegister());
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01001789 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07001790 locations->AddTemp(Location::RequiresRegister());
1791}
1792
1793void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
1794 LocationSummary* locations = instruction->GetLocations();
1795 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
1796 GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
1797 GpuRegister obj_cls = locations->GetTemp(0).AsRegister<GpuRegister>();
1798
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01001799 SlowPathCodeMIPS64* slow_path =
1800 new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001801 codegen_->AddSlowPath(slow_path);
1802
1803 // TODO: avoid this check if we know obj is not null.
1804 __ Beqzc(obj, slow_path->GetExitLabel());
1805 // Compare the class of `obj` with `cls`.
1806 __ LoadFromOffset(kLoadUnsignedWord, obj_cls, obj, mirror::Object::ClassOffset().Int32Value());
1807 __ Bnec(obj_cls, cls, slow_path->GetEntryLabel());
1808 __ Bind(slow_path->GetExitLabel());
1809}
1810
1811void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
1812 LocationSummary* locations =
1813 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
1814 locations->SetInAt(0, Location::RequiresRegister());
1815 if (check->HasUses()) {
1816 locations->SetOut(Location::SameAsFirstInput());
1817 }
1818}
1819
1820void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
1821 // We assume the class is not null.
1822 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
1823 check->GetLoadClass(),
1824 check,
1825 check->GetDexPc(),
1826 true);
1827 codegen_->AddSlowPath(slow_path);
1828 GenerateClassInitializationCheck(slow_path,
1829 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
1830}
1831
1832void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
1833 Primitive::Type in_type = compare->InputAt(0)->GetType();
1834
Alexey Frunze299a9392015-12-08 16:08:02 -08001835 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001836
1837 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001838 case Primitive::kPrimBoolean:
1839 case Primitive::kPrimByte:
1840 case Primitive::kPrimShort:
1841 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001842 case Primitive::kPrimInt:
Alexey Frunze4dda3372015-06-01 18:31:49 -07001843 case Primitive::kPrimLong:
1844 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001845 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001846 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1847 break;
1848
1849 case Primitive::kPrimFloat:
Alexey Frunze299a9392015-12-08 16:08:02 -08001850 case Primitive::kPrimDouble:
1851 locations->SetInAt(0, Location::RequiresFpuRegister());
1852 locations->SetInAt(1, Location::RequiresFpuRegister());
1853 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001854 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001855
1856 default:
1857 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
1858 }
1859}
1860
1861void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
1862 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08001863 GpuRegister res = locations->Out().AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001864 Primitive::Type in_type = instruction->InputAt(0)->GetType();
1865
1866 // 0 if: left == right
1867 // 1 if: left > right
1868 // -1 if: left < right
1869 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001870 case Primitive::kPrimBoolean:
1871 case Primitive::kPrimByte:
1872 case Primitive::kPrimShort:
1873 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08001874 case Primitive::kPrimInt:
Alexey Frunze4dda3372015-06-01 18:31:49 -07001875 case Primitive::kPrimLong: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001876 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001877 Location rhs_location = locations->InAt(1);
1878 bool use_imm = rhs_location.IsConstant();
1879 GpuRegister rhs = ZERO;
1880 if (use_imm) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00001881 if (in_type == Primitive::kPrimLong) {
Aart Bika19616e2016-02-01 18:57:58 -08001882 int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
1883 if (value != 0) {
1884 rhs = AT;
1885 __ LoadConst64(rhs, value);
1886 }
Roland Levillaina5c4a402016-03-15 15:02:50 +00001887 } else {
1888 int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
1889 if (value != 0) {
1890 rhs = AT;
1891 __ LoadConst32(rhs, value);
1892 }
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001893 }
1894 } else {
1895 rhs = rhs_location.AsRegister<GpuRegister>();
1896 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001897 __ Slt(TMP, lhs, rhs);
Alexey Frunze299a9392015-12-08 16:08:02 -08001898 __ Slt(res, rhs, lhs);
1899 __ Subu(res, res, TMP);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001900 break;
1901 }
1902
Alexey Frunze299a9392015-12-08 16:08:02 -08001903 case Primitive::kPrimFloat: {
1904 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
1905 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
1906 Mips64Label done;
1907 __ CmpEqS(FTMP, lhs, rhs);
1908 __ LoadConst32(res, 0);
1909 __ Bc1nez(FTMP, &done);
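      // The operands are unequal or unordered: with gt bias a NaN operand produces +1,
      // otherwise it produces -1.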
Roland Levillain32ca3752016-02-17 16:49:37 +00001910 if (instruction->IsGtBias()) {
Alexey Frunze299a9392015-12-08 16:08:02 -08001911 __ CmpLtS(FTMP, lhs, rhs);
1912 __ LoadConst32(res, -1);
1913 __ Bc1nez(FTMP, &done);
1914 __ LoadConst32(res, 1);
1915 } else {
1916 __ CmpLtS(FTMP, rhs, lhs);
1917 __ LoadConst32(res, 1);
1918 __ Bc1nez(FTMP, &done);
1919 __ LoadConst32(res, -1);
1920 }
1921 __ Bind(&done);
1922 break;
1923 }
1924
Alexey Frunze4dda3372015-06-01 18:31:49 -07001925 case Primitive::kPrimDouble: {
Alexey Frunze299a9392015-12-08 16:08:02 -08001926 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
1927 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
1928 Mips64Label done;
1929 __ CmpEqD(FTMP, lhs, rhs);
1930 __ LoadConst32(res, 0);
1931 __ Bc1nez(FTMP, &done);
Roland Levillain32ca3752016-02-17 16:49:37 +00001932 if (instruction->IsGtBias()) {
Alexey Frunze299a9392015-12-08 16:08:02 -08001933 __ CmpLtD(FTMP, lhs, rhs);
1934 __ LoadConst32(res, -1);
1935 __ Bc1nez(FTMP, &done);
1936 __ LoadConst32(res, 1);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001937 } else {
Alexey Frunze299a9392015-12-08 16:08:02 -08001938 __ CmpLtD(FTMP, rhs, lhs);
1939 __ LoadConst32(res, 1);
1940 __ Bc1nez(FTMP, &done);
1941 __ LoadConst32(res, -1);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001942 }
Alexey Frunze299a9392015-12-08 16:08:02 -08001943 __ Bind(&done);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001944 break;
1945 }
1946
1947 default:
1948 LOG(FATAL) << "Unimplemented compare type " << in_type;
1949 }
1950}
1951
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001952void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001953 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunze299a9392015-12-08 16:08:02 -08001954 switch (instruction->InputAt(0)->GetType()) {
1955 default:
1956 case Primitive::kPrimLong:
1957 locations->SetInAt(0, Location::RequiresRegister());
1958 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1959 break;
1960
1961 case Primitive::kPrimFloat:
1962 case Primitive::kPrimDouble:
1963 locations->SetInAt(0, Location::RequiresFpuRegister());
1964 locations->SetInAt(1, Location::RequiresFpuRegister());
1965 break;
1966 }
David Brazdilb3e773e2016-01-26 11:28:37 +00001967 if (!instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001968 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1969 }
1970}
1971
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00001972void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00001973 if (instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001974 return;
1975 }
1976
Alexey Frunze299a9392015-12-08 16:08:02 -08001977 Primitive::Type type = instruction->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001978 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001979 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
Alexey Frunze299a9392015-12-08 16:08:02 -08001980 Mips64Label true_label;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001981
Alexey Frunze299a9392015-12-08 16:08:02 -08001982 switch (type) {
1983 default:
1984 // Integer case.
1985 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
1986 return;
1987 case Primitive::kPrimLong:
1988 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
1989 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001990
Alexey Frunze299a9392015-12-08 16:08:02 -08001991 case Primitive::kPrimFloat:
1992 case Primitive::kPrimDouble:
1993 // TODO: don't use branches.
1994 GenerateFpCompareAndBranch(instruction->GetCondition(),
1995 instruction->IsGtBias(),
1996 type,
1997 locations,
1998 &true_label);
Aart Bike9f37602015-10-09 11:15:55 -07001999 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002000 }
Alexey Frunze299a9392015-12-08 16:08:02 -08002001
2002 // Convert the branches into the result.
2003 Mips64Label done;
2004
2005 // False case: result = 0.
2006 __ LoadConst32(dst, 0);
2007 __ Bc(&done);
2008
2009 // True case: result = 1.
2010 __ Bind(&true_label);
2011 __ LoadConst32(dst, 1);
2012 __ Bind(&done);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002013}
2014
Alexey Frunzec857c742015-09-23 15:12:39 -07002015void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2016 DCHECK(instruction->IsDiv() || instruction->IsRem());
2017 Primitive::Type type = instruction->GetResultType();
2018
2019 LocationSummary* locations = instruction->GetLocations();
2020 Location second = locations->InAt(1);
2021 DCHECK(second.IsConstant());
2022
2023 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2024 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
2025 int64_t imm = Int64FromConstant(second.GetConstant());
2026 DCHECK(imm == 1 || imm == -1);
2027
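  // The remainder of a division by +1 or -1 is always zero.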
2028 if (instruction->IsRem()) {
2029 __ Move(out, ZERO);
2030 } else {
2031 if (imm == -1) {
2032 if (type == Primitive::kPrimInt) {
2033 __ Subu(out, ZERO, dividend);
2034 } else {
2035 DCHECK_EQ(type, Primitive::kPrimLong);
2036 __ Dsubu(out, ZERO, dividend);
2037 }
2038 } else if (out != dividend) {
2039 __ Move(out, dividend);
2040 }
2041 }
2042}
2043
2044void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2045 DCHECK(instruction->IsDiv() || instruction->IsRem());
2046 Primitive::Type type = instruction->GetResultType();
2047
2048 LocationSummary* locations = instruction->GetLocations();
2049 Location second = locations->InAt(1);
2050 DCHECK(second.IsConstant());
2051
2052 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2053 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
2054 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002055 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Alexey Frunzec857c742015-09-23 15:12:39 -07002056 int ctz_imm = CTZ(abs_imm);
2057
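  // To round toward zero, negative dividends are biased by (abs_imm - 1) before the shift
  // (division) or the mask (modulo); the bias is built from the sign bit with the sra/srl
  // pairs below and subtracted out again on the modulo paths.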
2058 if (instruction->IsDiv()) {
2059 if (type == Primitive::kPrimInt) {
2060 if (ctz_imm == 1) {
2061 // Fast path for division by +/-2, which is very common.
2062 __ Srl(TMP, dividend, 31);
2063 } else {
2064 __ Sra(TMP, dividend, 31);
2065 __ Srl(TMP, TMP, 32 - ctz_imm);
2066 }
2067 __ Addu(out, dividend, TMP);
2068 __ Sra(out, out, ctz_imm);
2069 if (imm < 0) {
2070 __ Subu(out, ZERO, out);
2071 }
2072 } else {
2073 DCHECK_EQ(type, Primitive::kPrimLong);
2074 if (ctz_imm == 1) {
2075 // Fast path for division by +/-2, which is very common.
2076 __ Dsrl32(TMP, dividend, 31);
2077 } else {
2078 __ Dsra32(TMP, dividend, 31);
2079 if (ctz_imm > 32) {
2080 __ Dsrl(TMP, TMP, 64 - ctz_imm);
2081 } else {
2082 __ Dsrl32(TMP, TMP, 32 - ctz_imm);
2083 }
2084 }
2085 __ Daddu(out, dividend, TMP);
2086 if (ctz_imm < 32) {
2087 __ Dsra(out, out, ctz_imm);
2088 } else {
2089 __ Dsra32(out, out, ctz_imm - 32);
2090 }
2091 if (imm < 0) {
2092 __ Dsubu(out, ZERO, out);
2093 }
2094 }
2095 } else {
2096 if (type == Primitive::kPrimInt) {
2097 if (ctz_imm == 1) {
2098 // Fast path for modulo +/-2, which is very common.
2099 __ Sra(TMP, dividend, 31);
2100 __ Subu(out, dividend, TMP);
2101 __ Andi(out, out, 1);
2102 __ Addu(out, out, TMP);
2103 } else {
2104 __ Sra(TMP, dividend, 31);
2105 __ Srl(TMP, TMP, 32 - ctz_imm);
2106 __ Addu(out, dividend, TMP);
2107 if (IsUint<16>(abs_imm - 1)) {
2108 __ Andi(out, out, abs_imm - 1);
2109 } else {
2110 __ Sll(out, out, 32 - ctz_imm);
2111 __ Srl(out, out, 32 - ctz_imm);
2112 }
2113 __ Subu(out, out, TMP);
2114 }
2115 } else {
2116 DCHECK_EQ(type, Primitive::kPrimLong);
2117 if (ctz_imm == 1) {
2118 // Fast path for modulo +/-2, which is very common.
2119 __ Dsra32(TMP, dividend, 31);
2120 __ Dsubu(out, dividend, TMP);
2121 __ Andi(out, out, 1);
2122 __ Daddu(out, out, TMP);
2123 } else {
2124 __ Dsra32(TMP, dividend, 31);
2125 if (ctz_imm > 32) {
2126 __ Dsrl(TMP, TMP, 64 - ctz_imm);
2127 } else {
2128 __ Dsrl32(TMP, TMP, 32 - ctz_imm);
2129 }
2130 __ Daddu(out, dividend, TMP);
2131 if (IsUint<16>(abs_imm - 1)) {
2132 __ Andi(out, out, abs_imm - 1);
2133 } else {
2134 if (ctz_imm > 32) {
2135 __ Dsll(out, out, 64 - ctz_imm);
2136 __ Dsrl(out, out, 64 - ctz_imm);
2137 } else {
2138 __ Dsll32(out, out, 32 - ctz_imm);
2139 __ Dsrl32(out, out, 32 - ctz_imm);
2140 }
2141 }
2142 __ Dsubu(out, out, TMP);
2143 }
2144 }
2145 }
2146}
2147
2148void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
2149 DCHECK(instruction->IsDiv() || instruction->IsRem());
2150
2151 LocationSummary* locations = instruction->GetLocations();
2152 Location second = locations->InAt(1);
2153 DCHECK(second.IsConstant());
2154
2155 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2156 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
2157 int64_t imm = Int64FromConstant(second.GetConstant());
2158
2159 Primitive::Type type = instruction->GetResultType();
2160 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;
2161
2162 int64_t magic;
2163 int shift;
2164 CalculateMagicAndShiftForDivRem(imm,
2165 (type == Primitive::kPrimLong),
2166 &magic,
2167 &shift);
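  // Constant division via multiplication by a magic number (Hacker's Delight style): take
  // the high word of dividend * magic, correct it when magic and imm differ in sign, shift,
  // and add the sign bit; the remainder is then computed as dividend - quotient * imm.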
2168
2169 if (type == Primitive::kPrimInt) {
2170 __ LoadConst32(TMP, magic);
2171 __ MuhR6(TMP, dividend, TMP);
2172
2173 if (imm > 0 && magic < 0) {
2174 __ Addu(TMP, TMP, dividend);
2175 } else if (imm < 0 && magic > 0) {
2176 __ Subu(TMP, TMP, dividend);
2177 }
2178
2179 if (shift != 0) {
2180 __ Sra(TMP, TMP, shift);
2181 }
2182
2183 if (instruction->IsDiv()) {
2184 __ Sra(out, TMP, 31);
2185 __ Subu(out, TMP, out);
2186 } else {
2187 __ Sra(AT, TMP, 31);
2188 __ Subu(AT, TMP, AT);
2189 __ LoadConst32(TMP, imm);
2190 __ MulR6(TMP, AT, TMP);
2191 __ Subu(out, dividend, TMP);
2192 }
2193 } else {
2194 __ LoadConst64(TMP, magic);
2195 __ Dmuh(TMP, dividend, TMP);
2196
2197 if (imm > 0 && magic < 0) {
2198 __ Daddu(TMP, TMP, dividend);
2199 } else if (imm < 0 && magic > 0) {
2200 __ Dsubu(TMP, TMP, dividend);
2201 }
2202
2203 if (shift >= 32) {
2204 __ Dsra32(TMP, TMP, shift - 32);
2205 } else if (shift > 0) {
2206 __ Dsra(TMP, TMP, shift);
2207 }
2208
2209 if (instruction->IsDiv()) {
2210 __ Dsra32(out, TMP, 31);
2211 __ Dsubu(out, TMP, out);
2212 } else {
2213 __ Dsra32(AT, TMP, 31);
2214 __ Dsubu(AT, TMP, AT);
2215 __ LoadConst64(TMP, imm);
2216 __ Dmul(TMP, AT, TMP);
2217 __ Dsubu(out, dividend, TMP);
2218 }
2219 }
2220}
2221
2222void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
2223 DCHECK(instruction->IsDiv() || instruction->IsRem());
2224 Primitive::Type type = instruction->GetResultType();
2225 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;
2226
2227 LocationSummary* locations = instruction->GetLocations();
2228 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2229 Location second = locations->InAt(1);
2230
2231 if (second.IsConstant()) {
2232 int64_t imm = Int64FromConstant(second.GetConstant());
2233 if (imm == 0) {
2234 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
2235 } else if (imm == 1 || imm == -1) {
2236 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00002237 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunzec857c742015-09-23 15:12:39 -07002238 DivRemByPowerOfTwo(instruction);
2239 } else {
2240 DCHECK(imm <= -2 || imm >= 2);
2241 GenerateDivRemWithAnyConstant(instruction);
2242 }
2243 } else {
2244 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
2245 GpuRegister divisor = second.AsRegister<GpuRegister>();
2246 if (instruction->IsDiv()) {
2247 if (type == Primitive::kPrimInt)
2248 __ DivR6(out, dividend, divisor);
2249 else
2250 __ Ddiv(out, dividend, divisor);
2251 } else {
2252 if (type == Primitive::kPrimInt)
2253 __ ModR6(out, dividend, divisor);
2254 else
2255 __ Dmod(out, dividend, divisor);
2256 }
2257 }
2258}
2259
Alexey Frunze4dda3372015-06-01 18:31:49 -07002260void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
2261 LocationSummary* locations =
2262 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
2263 switch (div->GetResultType()) {
2264 case Primitive::kPrimInt:
2265 case Primitive::kPrimLong:
2266 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07002267 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002268 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2269 break;
2270
2271 case Primitive::kPrimFloat:
2272 case Primitive::kPrimDouble:
2273 locations->SetInAt(0, Location::RequiresFpuRegister());
2274 locations->SetInAt(1, Location::RequiresFpuRegister());
2275 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2276 break;
2277
2278 default:
2279 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2280 }
2281}
2282
2283void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
2284 Primitive::Type type = instruction->GetType();
2285 LocationSummary* locations = instruction->GetLocations();
2286
2287 switch (type) {
2288 case Primitive::kPrimInt:
Alexey Frunzec857c742015-09-23 15:12:39 -07002289 case Primitive::kPrimLong:
2290 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002291 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002292 case Primitive::kPrimFloat:
2293 case Primitive::kPrimDouble: {
2294 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
2295 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
2296 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
2297 if (type == Primitive::kPrimFloat)
2298 __ DivS(dst, lhs, rhs);
2299 else
2300 __ DivD(dst, lhs, rhs);
2301 break;
2302 }
2303 default:
2304 LOG(FATAL) << "Unexpected div type " << type;
2305 }
2306}
2307
2308void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002309 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002310 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002311}
2312
2313void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2314 SlowPathCodeMIPS64* slow_path =
2315 new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS64(instruction);
2316 codegen_->AddSlowPath(slow_path);
2317 Location value = instruction->GetLocations()->InAt(0);
2318
2319 Primitive::Type type = instruction->GetType();
2320
Nicolas Geoffraye5671612016-03-16 11:03:54 +00002321 if (!Primitive::IsIntegralType(type)) {
2322 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Serguei Katkov8c0676c2015-08-03 13:55:33 +06002323 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002324 }
2325
2326 if (value.IsConstant()) {
2327 int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
2328 if (divisor == 0) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002329 __ Bc(slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002330 } else {
2331 // A division by a non-zero constant is valid. We don't need to perform
2332 // any check, so simply fall through.
2333 }
2334 } else {
2335 __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
2336 }
2337}
2338
2339void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
2340 LocationSummary* locations =
2341 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2342 locations->SetOut(Location::ConstantLocation(constant));
2343}
2344
2345void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
2346 // Will be generated at use site.
2347}
2348
2349void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
2350 exit->SetLocations(nullptr);
2351}
2352
2353void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
2354}
2355
2356void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
2357 LocationSummary* locations =
2358 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
2359 locations->SetOut(Location::ConstantLocation(constant));
2360}
2361
2362void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
2363 // Will be generated at use site.
2364}
2365
David Brazdilfc6a86a2015-06-26 10:33:45 +00002366void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002367 DCHECK(!successor->IsExitBlock());
2368 HBasicBlock* block = got->GetBlock();
2369 HInstruction* previous = got->GetPrevious();
2370 HLoopInformation* info = block->GetLoopInformation();
2371
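  // Loop back edges carry the suspend check so that long-running loops can be interrupted.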
2372 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
2373 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
2374 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
2375 return;
2376 }
2377 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
2378 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
2379 }
2380 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002381 __ Bc(codegen_->GetLabelOf(successor));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002382 }
2383}
2384
David Brazdilfc6a86a2015-06-26 10:33:45 +00002385void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
2386 got->SetLocations(nullptr);
2387}
2388
2389void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
2390 HandleGoto(got, got->GetSuccessor());
2391}
2392
2393void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
2394 try_boundary->SetLocations(nullptr);
2395}
2396
2397void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
2398 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
2399 if (!successor->IsExitBlock()) {
2400 HandleGoto(try_boundary, successor);
2401 }
2402}
2403
Alexey Frunze299a9392015-12-08 16:08:02 -08002404void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
2405 bool is64bit,
2406 LocationSummary* locations) {
2407 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
2408 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
2409 Location rhs_location = locations->InAt(1);
2410 GpuRegister rhs_reg = ZERO;
2411 int64_t rhs_imm = 0;
2412 bool use_imm = rhs_location.IsConstant();
2413 if (use_imm) {
2414 if (is64bit) {
2415 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
2416 } else {
2417 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
2418 }
2419 } else {
2420 rhs_reg = rhs_location.AsRegister<GpuRegister>();
2421 }
2422 int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);
2423
2424 switch (cond) {
2425 case kCondEQ:
2426 case kCondNE:
2427 if (use_imm && IsUint<16>(rhs_imm)) {
2428 __ Xori(dst, lhs, rhs_imm);
2429 } else {
2430 if (use_imm) {
2431 rhs_reg = TMP;
2432 __ LoadConst64(rhs_reg, rhs_imm);
2433 }
2434 __ Xor(dst, lhs, rhs_reg);
2435 }
2436 if (cond == kCondEQ) {
2437 __ Sltiu(dst, dst, 1);
2438 } else {
2439 __ Sltu(dst, ZERO, dst);
2440 }
2441 break;
2442
2443 case kCondLT:
2444 case kCondGE:
2445 if (use_imm && IsInt<16>(rhs_imm)) {
2446 __ Slti(dst, lhs, rhs_imm);
2447 } else {
2448 if (use_imm) {
2449 rhs_reg = TMP;
2450 __ LoadConst64(rhs_reg, rhs_imm);
2451 }
2452 __ Slt(dst, lhs, rhs_reg);
2453 }
2454 if (cond == kCondGE) {
2455 // Simulate lhs >= rhs via !(lhs < rhs) since there's
2456 // only the slt instruction but no sge.
2457 __ Xori(dst, dst, 1);
2458 }
2459 break;
2460
2461 case kCondLE:
2462 case kCondGT:
2463 if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
2464 // Simulate lhs <= rhs via lhs < rhs + 1.
2465 __ Slti(dst, lhs, rhs_imm_plus_one);
2466 if (cond == kCondGT) {
2467 // Simulate lhs > rhs via !(lhs <= rhs) since there's
2468 // only the slti instruction but no sgti.
2469 __ Xori(dst, dst, 1);
2470 }
2471 } else {
2472 if (use_imm) {
2473 rhs_reg = TMP;
2474 __ LoadConst64(rhs_reg, rhs_imm);
2475 }
2476 __ Slt(dst, rhs_reg, lhs);
2477 if (cond == kCondLE) {
2478 // Simulate lhs <= rhs via !(rhs < lhs) since there's
2479 // only the slt instruction but no sle.
2480 __ Xori(dst, dst, 1);
2481 }
2482 }
2483 break;
2484
2485 case kCondB:
2486 case kCondAE:
2487 if (use_imm && IsInt<16>(rhs_imm)) {
2488 // Sltiu sign-extends its 16-bit immediate operand before
2489 // the comparison and thus lets us compare directly with
2490 // unsigned values in the ranges [0, 0x7fff] and
2491 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
2492 __ Sltiu(dst, lhs, rhs_imm);
2493 } else {
2494 if (use_imm) {
2495 rhs_reg = TMP;
2496 __ LoadConst64(rhs_reg, rhs_imm);
2497 }
2498 __ Sltu(dst, lhs, rhs_reg);
2499 }
2500 if (cond == kCondAE) {
2501 // Simulate lhs >= rhs via !(lhs < rhs) since there's
2502 // only the sltu instruction but no sgeu.
2503 __ Xori(dst, dst, 1);
2504 }
2505 break;
2506
2507 case kCondBE:
2508 case kCondA:
2509 if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
2510 // Simulate lhs <= rhs via lhs < rhs + 1.
2511 // Note that this only works if rhs + 1 does not overflow
2512 // to 0, hence the check above.
2513 // Sltiu sign-extends its 16-bit immediate operand before
2514 // the comparison and thus lets us compare directly with
2515 // unsigned values in the ranges [0, 0x7fff] and
2516 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
2517 __ Sltiu(dst, lhs, rhs_imm_plus_one);
2518 if (cond == kCondA) {
2519 // Simulate lhs > rhs via !(lhs <= rhs) since there's
2520 // only the sltiu instruction but no sgtiu.
2521 __ Xori(dst, dst, 1);
2522 }
2523 } else {
2524 if (use_imm) {
2525 rhs_reg = TMP;
2526 __ LoadConst64(rhs_reg, rhs_imm);
2527 }
2528 __ Sltu(dst, rhs_reg, lhs);
2529 if (cond == kCondBE) {
2530 // Simulate lhs <= rhs via !(rhs < lhs) since there's
2531 // only the sltu instruction but no sleu.
2532 __ Xori(dst, dst, 1);
2533 }
2534 }
2535 break;
2536 }
2537}
2538
2539void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
2540 bool is64bit,
2541 LocationSummary* locations,
2542 Mips64Label* label) {
2543 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
2544 Location rhs_location = locations->InAt(1);
2545 GpuRegister rhs_reg = ZERO;
2546 int64_t rhs_imm = 0;
2547 bool use_imm = rhs_location.IsConstant();
2548 if (use_imm) {
2549 if (is64bit) {
2550 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
2551 } else {
2552 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
2553 }
2554 } else {
2555 rhs_reg = rhs_location.AsRegister<GpuRegister>();
2556 }
2557
2558 if (use_imm && rhs_imm == 0) {
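    // A zero immediate allows the compact compare-with-zero branches; unsigned < 0 never
    // branches and unsigned >= 0 always does.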
2559 switch (cond) {
2560 case kCondEQ:
2561 case kCondBE: // <= 0 if zero
2562 __ Beqzc(lhs, label);
2563 break;
2564 case kCondNE:
2565 case kCondA: // > 0 if non-zero
2566 __ Bnezc(lhs, label);
2567 break;
2568 case kCondLT:
2569 __ Bltzc(lhs, label);
2570 break;
2571 case kCondGE:
2572 __ Bgezc(lhs, label);
2573 break;
2574 case kCondLE:
2575 __ Blezc(lhs, label);
2576 break;
2577 case kCondGT:
2578 __ Bgtzc(lhs, label);
2579 break;
2580 case kCondB: // always false
2581 break;
2582 case kCondAE: // always true
2583 __ Bc(label);
2584 break;
2585 }
2586 } else {
2587 if (use_imm) {
2588 rhs_reg = TMP;
2589 __ LoadConst64(rhs_reg, rhs_imm);
2590 }
2591 switch (cond) {
2592 case kCondEQ:
2593 __ Beqc(lhs, rhs_reg, label);
2594 break;
2595 case kCondNE:
2596 __ Bnec(lhs, rhs_reg, label);
2597 break;
2598 case kCondLT:
2599 __ Bltc(lhs, rhs_reg, label);
2600 break;
2601 case kCondGE:
2602 __ Bgec(lhs, rhs_reg, label);
2603 break;
2604 case kCondLE:
2605 __ Bgec(rhs_reg, lhs, label);
2606 break;
2607 case kCondGT:
2608 __ Bltc(rhs_reg, lhs, label);
2609 break;
2610 case kCondB:
2611 __ Bltuc(lhs, rhs_reg, label);
2612 break;
2613 case kCondAE:
2614 __ Bgeuc(lhs, rhs_reg, label);
2615 break;
2616 case kCondBE:
2617 __ Bgeuc(rhs_reg, lhs, label);
2618 break;
2619 case kCondA:
2620 __ Bltuc(rhs_reg, lhs, label);
2621 break;
2622 }
2623 }
2624}
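// Added note: when the right-hand side is the constant 0, the branch generation
// above uses the R6 compare-with-zero branches (Beqzc, Bnezc, Bltzc, ...) and never
// materializes the constant in TMP. For unsigned conditions against 0, kCondB can
// never be taken (nothing is unsigned-less-than 0), so no branch is emitted, and
// kCondAE is always taken, so it degenerates to an unconditional Bc.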
2625
2626void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
2627 bool gt_bias,
2628 Primitive::Type type,
2629 LocationSummary* locations,
2630 Mips64Label* label) {
2631 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
2632 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
2633 if (type == Primitive::kPrimFloat) {
2634 switch (cond) {
2635 case kCondEQ:
2636 __ CmpEqS(FTMP, lhs, rhs);
2637 __ Bc1nez(FTMP, label);
2638 break;
2639 case kCondNE:
2640 __ CmpEqS(FTMP, lhs, rhs);
2641 __ Bc1eqz(FTMP, label);
2642 break;
2643 case kCondLT:
2644 if (gt_bias) {
2645 __ CmpLtS(FTMP, lhs, rhs);
2646 } else {
2647 __ CmpUltS(FTMP, lhs, rhs);
2648 }
2649 __ Bc1nez(FTMP, label);
2650 break;
2651 case kCondLE:
2652 if (gt_bias) {
2653 __ CmpLeS(FTMP, lhs, rhs);
2654 } else {
2655 __ CmpUleS(FTMP, lhs, rhs);
2656 }
2657 __ Bc1nez(FTMP, label);
2658 break;
2659 case kCondGT:
2660 if (gt_bias) {
2661 __ CmpUltS(FTMP, rhs, lhs);
2662 } else {
2663 __ CmpLtS(FTMP, rhs, lhs);
2664 }
2665 __ Bc1nez(FTMP, label);
2666 break;
2667 case kCondGE:
2668 if (gt_bias) {
2669 __ CmpUleS(FTMP, rhs, lhs);
2670 } else {
2671 __ CmpLeS(FTMP, rhs, lhs);
2672 }
2673 __ Bc1nez(FTMP, label);
2674 break;
2675 default:
2676 LOG(FATAL) << "Unexpected non-floating-point condition";
2677 }
2678 } else {
2679 DCHECK_EQ(type, Primitive::kPrimDouble);
2680 switch (cond) {
2681 case kCondEQ:
2682 __ CmpEqD(FTMP, lhs, rhs);
2683 __ Bc1nez(FTMP, label);
2684 break;
2685 case kCondNE:
2686 __ CmpEqD(FTMP, lhs, rhs);
2687 __ Bc1eqz(FTMP, label);
2688 break;
2689 case kCondLT:
2690 if (gt_bias) {
2691 __ CmpLtD(FTMP, lhs, rhs);
2692 } else {
2693 __ CmpUltD(FTMP, lhs, rhs);
2694 }
2695 __ Bc1nez(FTMP, label);
2696 break;
2697 case kCondLE:
2698 if (gt_bias) {
2699 __ CmpLeD(FTMP, lhs, rhs);
2700 } else {
2701 __ CmpUleD(FTMP, lhs, rhs);
2702 }
2703 __ Bc1nez(FTMP, label);
2704 break;
2705 case kCondGT:
2706 if (gt_bias) {
2707 __ CmpUltD(FTMP, rhs, lhs);
2708 } else {
2709 __ CmpLtD(FTMP, rhs, lhs);
2710 }
2711 __ Bc1nez(FTMP, label);
2712 break;
2713 case kCondGE:
2714 if (gt_bias) {
2715 __ CmpUleD(FTMP, rhs, lhs);
2716 } else {
2717 __ CmpLeD(FTMP, rhs, lhs);
2718 }
2719 __ Bc1nez(FTMP, label);
2720 break;
2721 default:
2722 LOG(FATAL) << "Unexpected non-floating-point condition";
2723 }
2724 }
2725}
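// Added note: `gt_bias` decides how an unordered comparison (a NaN operand) resolves.
// With gt_bias the ordered compares (CmpLtS/CmpLeS and their double variants) are
// chosen, so a NaN makes "<" and "<=" false; without it the unordered compares
// (CmpUltS/CmpUleS) make them true instead. The ">" and ">=" cases mirror this by
// swapping the operands.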
2726
2727void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
2728                                                           size_t condition_input_index,
2729                                                           Mips64Label* true_target,
2730                                                           Mips64Label* false_target) {
2731  HInstruction* cond = instruction->InputAt(condition_input_index);
2732
2733  if (true_target == nullptr && false_target == nullptr) {
2734    // Nothing to do. The code always falls through.
2735    return;
2736  } else if (cond->IsIntConstant()) {
2737    // Constant condition, statically compared against "true" (integer value 1).
2738    if (cond->AsIntConstant()->IsTrue()) {
2739      if (true_target != nullptr) {
2740        __ Bc(true_target);
2741      }
2742    } else {
2743      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
2744      if (false_target != nullptr) {
2745        __ Bc(false_target);
2746      }
2747    }
2748    return;
2749 }
2750
2751 // The following code generates these patterns:
2752 // (1) true_target == nullptr && false_target != nullptr
2753 // - opposite condition true => branch to false_target
2754 // (2) true_target != nullptr && false_target == nullptr
2755 // - condition true => branch to true_target
2756 // (3) true_target != nullptr && false_target != nullptr
2757 // - condition true => branch to true_target
2758 // - branch to false_target
2759 if (IsBooleanValueOrMaterializedCondition(cond)) {
2760    // The condition instruction has been materialized, compare the output to 0.
2761    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
2762    DCHECK(cond_val.IsRegister());
2763    if (true_target == nullptr) {
2764 __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
2765 } else {
2766 __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
2767 }
2768  } else {
2769 // The condition instruction has not been materialized, use its inputs as
2770 // the comparison and its condition as the branch condition.
2771    HCondition* condition = cond->AsCondition();
2772    Primitive::Type type = condition->InputAt(0)->GetType();
2773 LocationSummary* locations = cond->GetLocations();
2774 IfCondition if_cond = condition->GetCondition();
2775 Mips64Label* branch_target = true_target;
2776
2777    if (true_target == nullptr) {
2778      if_cond = condition->GetOppositeCondition();
2779      branch_target = false_target;
2780    }
2781
2782    switch (type) {
2783 default:
2784 GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
2785 break;
2786 case Primitive::kPrimLong:
2787 GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
2788 break;
2789 case Primitive::kPrimFloat:
2790 case Primitive::kPrimDouble:
2791 GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
2792 break;
2793    }
2794  }
2795
2796 // If neither branch falls through (case 3), the conditional branch to `true_target`
2797 // was already emitted (case 2) and we need to emit a jump to `false_target`.
2798 if (true_target != nullptr && false_target != nullptr) {
2799    __ Bc(false_target);
2800  }
2801}
2802
2803void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
2804 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
2805  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
2806    locations->SetInAt(0, Location::RequiresRegister());
2807 }
2808}
2809
2810void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
2811  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
2812 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
2813  Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
2814      nullptr : codegen_->GetLabelOf(true_successor);
2815  Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
2816      nullptr : codegen_->GetLabelOf(false_successor);
2817 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
2818}
2819
2820void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
2821 LocationSummary* locations = new (GetGraph()->GetArena())
2822 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
2823  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
2824  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
2825    locations->SetInAt(0, Location::RequiresRegister());
2826 }
2827}
2828
2829void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
2830  SlowPathCodeMIPS64* slow_path =
2831      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
2832  GenerateTestAndBranch(deoptimize,
2833 /* condition_input_index */ 0,
2834 slow_path->GetEntryLabel(),
2835 /* false_target */ nullptr);
2836}
2837
2838void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
2839 LocationSummary* locations = new (GetGraph()->GetArena())
2840 LocationSummary(flag, LocationSummary::kNoCall);
2841 locations->SetOut(Location::RequiresRegister());
2842}
2843
2844void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
2845 __ LoadFromOffset(kLoadWord,
2846 flag->GetLocations()->Out().AsRegister<GpuRegister>(),
2847 SP,
2848 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
2849}
2850
2851void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
2852 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
2853 if (Primitive::IsFloatingPointType(select->GetType())) {
2854 locations->SetInAt(0, Location::RequiresFpuRegister());
2855 locations->SetInAt(1, Location::RequiresFpuRegister());
2856 } else {
2857 locations->SetInAt(0, Location::RequiresRegister());
2858 locations->SetInAt(1, Location::RequiresRegister());
2859 }
2860 if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
2861 locations->SetInAt(2, Location::RequiresRegister());
2862 }
2863 locations->SetOut(Location::SameAsFirstInput());
2864}
2865
2866void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
2867 LocationSummary* locations = select->GetLocations();
2868 Mips64Label false_target;
2869 GenerateTestAndBranch(select,
2870 /* condition_input_index */ 2,
2871 /* true_target */ nullptr,
2872 &false_target);
2873 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
2874 __ Bind(&false_target);
2875}
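// Added note: HSelect is lowered without a conditional move. Because the output is
// allocated SameAsFirstInput, the false value already sits in the output register,
// so the code above only needs to branch around a single move (sketch):
//   <condition test>          // falls through when the condition is true
//   move  out, true_value     // InAt(1)
// false_target: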
2876
2877void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
2878 new (GetGraph()->GetArena()) LocationSummary(info);
2879}
2880
2881void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
2882  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
2883}
2884
2885void CodeGeneratorMIPS64::GenerateNop() {
2886 __ Nop();
2887}
2888
2889void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
2890 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
2891 LocationSummary* locations =
2892 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2893 locations->SetInAt(0, Location::RequiresRegister());
2894 if (Primitive::IsFloatingPointType(instruction->GetType())) {
2895 locations->SetOut(Location::RequiresFpuRegister());
2896 } else {
2897 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2898 }
2899}
2900
2901void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
2902 const FieldInfo& field_info) {
2903 Primitive::Type type = field_info.GetFieldType();
2904 LocationSummary* locations = instruction->GetLocations();
2905 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
2906 LoadOperandType load_type = kLoadUnsignedByte;
2907 switch (type) {
2908 case Primitive::kPrimBoolean:
2909 load_type = kLoadUnsignedByte;
2910 break;
2911 case Primitive::kPrimByte:
2912 load_type = kLoadSignedByte;
2913 break;
2914 case Primitive::kPrimShort:
2915 load_type = kLoadSignedHalfword;
2916 break;
2917 case Primitive::kPrimChar:
2918 load_type = kLoadUnsignedHalfword;
2919 break;
2920 case Primitive::kPrimInt:
2921 case Primitive::kPrimFloat:
2922 load_type = kLoadWord;
2923 break;
2924 case Primitive::kPrimLong:
2925 case Primitive::kPrimDouble:
2926 load_type = kLoadDoubleword;
2927 break;
2928 case Primitive::kPrimNot:
2929 load_type = kLoadUnsignedWord;
2930 break;
2931 case Primitive::kPrimVoid:
2932 LOG(FATAL) << "Unreachable type " << type;
2933 UNREACHABLE();
2934 }
2935 if (!Primitive::IsFloatingPointType(type)) {
2936 DCHECK(locations->Out().IsRegister());
2937 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
2938 __ LoadFromOffset(load_type, dst, obj, field_info.GetFieldOffset().Uint32Value());
2939 } else {
2940 DCHECK(locations->Out().IsFpuRegister());
2941 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
2942 __ LoadFpuFromOffset(load_type, dst, obj, field_info.GetFieldOffset().Uint32Value());
2943 }
2944
2945 codegen_->MaybeRecordImplicitNullCheck(instruction);
2946 // TODO: memory barrier?
2947}
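// Added note: the load_type table above also encodes the extension policy: byte and
// short loads sign-extend, boolean and char loads zero-extend, and kPrimNot uses
// kLoadUnsignedWord because heap references are 32-bit values that must be
// zero-extended into a 64-bit GPU register.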
2948
2949void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
2950 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
2951 LocationSummary* locations =
2952 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2953 locations->SetInAt(0, Location::RequiresRegister());
2954 if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
2955 locations->SetInAt(1, Location::RequiresFpuRegister());
2956 } else {
2957 locations->SetInAt(1, Location::RequiresRegister());
2958 }
2959}
2960
2961void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
2962                                                     const FieldInfo& field_info,
2963                                                     bool value_can_be_null) {
2964  Primitive::Type type = field_info.GetFieldType();
2965 LocationSummary* locations = instruction->GetLocations();
2966 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
2967 StoreOperandType store_type = kStoreByte;
2968 switch (type) {
2969 case Primitive::kPrimBoolean:
2970 case Primitive::kPrimByte:
2971 store_type = kStoreByte;
2972 break;
2973 case Primitive::kPrimShort:
2974 case Primitive::kPrimChar:
2975 store_type = kStoreHalfword;
2976 break;
2977 case Primitive::kPrimInt:
2978 case Primitive::kPrimFloat:
2979 case Primitive::kPrimNot:
2980 store_type = kStoreWord;
2981 break;
2982 case Primitive::kPrimLong:
2983 case Primitive::kPrimDouble:
2984 store_type = kStoreDoubleword;
2985 break;
2986 case Primitive::kPrimVoid:
2987 LOG(FATAL) << "Unreachable type " << type;
2988 UNREACHABLE();
2989 }
2990 if (!Primitive::IsFloatingPointType(type)) {
2991 DCHECK(locations->InAt(1).IsRegister());
2992 GpuRegister src = locations->InAt(1).AsRegister<GpuRegister>();
2993 __ StoreToOffset(store_type, src, obj, field_info.GetFieldOffset().Uint32Value());
2994 } else {
2995 DCHECK(locations->InAt(1).IsFpuRegister());
2996 FpuRegister src = locations->InAt(1).AsFpuRegister<FpuRegister>();
2997 __ StoreFpuToOffset(store_type, src, obj, field_info.GetFieldOffset().Uint32Value());
2998 }
2999
3000 codegen_->MaybeRecordImplicitNullCheck(instruction);
3001 // TODO: memory barriers?
3002 if (CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1))) {
3003 DCHECK(locations->InAt(1).IsRegister());
3004 GpuRegister src = locations->InAt(1).AsRegister<GpuRegister>();
3005    codegen_->MarkGCCard(obj, src, value_can_be_null);
3006  }
3007}
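// Added note: MarkGCCard above is the card-marking write barrier for reference
// stores; it is only emitted when StoreNeedsWriteBarrier reports that the stored
// value is a reference that may need it, and `value_can_be_null` tells the barrier
// whether it may skip its own null check on the stored value.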
3008
3009void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
3010 HandleFieldGet(instruction, instruction->GetFieldInfo());
3011}
3012
3013void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
3014 HandleFieldGet(instruction, instruction->GetFieldInfo());
3015}
3016
3017void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
3018 HandleFieldSet(instruction, instruction->GetFieldInfo());
3019}
3020
3021void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
3022  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
3023}
3024
3025void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(
3026 HInstruction* instruction ATTRIBUTE_UNUSED,
3027 Location root,
3028 GpuRegister obj,
3029 uint32_t offset) {
3030 // When handling HLoadClass::LoadKind::kDexCachePcRelative, the caller calls
3031 // EmitPcRelativeAddressPlaceholderHigh() and then GenerateGcRootFieldLoad().
3032 // The relative patcher expects the two methods to emit the following patchable
3033 // sequence of instructions in this case:
3034 // auipc reg1, 0x1234 // 0x1234 is a placeholder for offset_high.
3035 // lwu reg2, 0x5678(reg1) // 0x5678 is a placeholder for offset_low.
3036 // TODO: Adjust GenerateGcRootFieldLoad() and its caller when this method is
3037 // extended (e.g. for read barriers) so as not to break the relative patcher.
3038 GpuRegister root_reg = root.AsRegister<GpuRegister>();
3039 if (kEmitCompilerReadBarrier) {
3040 UNIMPLEMENTED(FATAL) << "for read barrier";
3041 } else {
3042 // Plain GC root load with no read barrier.
3043 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
3044 __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
3045 // Note that GC roots are not affected by heap poisoning, thus we
3046 // do not have to unpoison `root_reg` here.
3047 }
3048}
3049
3050void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
3051  LocationSummary::CallKind call_kind =
3052      instruction->IsExactCheck() ? LocationSummary::kNoCall : LocationSummary::kCallOnSlowPath;
3053  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3054 locations->SetInAt(0, Location::RequiresRegister());
3055 locations->SetInAt(1, Location::RequiresRegister());
3056 // The output does overlap inputs.
3057  // Note that TypeCheckSlowPathMIPS64 uses this register too.
3058  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3059}
3060
3061void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
3062 LocationSummary* locations = instruction->GetLocations();
3063 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
3064 GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
3065 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3066
3067  Mips64Label done;
3068
3069 // Return 0 if `obj` is null.
3070 // TODO: Avoid this check if we know `obj` is not null.
3071 __ Move(out, ZERO);
3072 __ Beqzc(obj, &done);
3073
3074 // Compare the class of `obj` with `cls`.
3075 __ LoadFromOffset(kLoadUnsignedWord, out, obj, mirror::Object::ClassOffset().Int32Value());
3076  if (instruction->IsExactCheck()) {
3077    // Classes must be equal for the instanceof to succeed.
3078 __ Xor(out, out, cls);
3079 __ Sltiu(out, out, 1);
3080 } else {
3081 // If the classes are not equal, we go into a slow path.
3082 DCHECK(locations->OnlyCallsOnSlowPath());
3083 SlowPathCodeMIPS64* slow_path =
3084        new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction);
3085    codegen_->AddSlowPath(slow_path);
3086 __ Bnec(out, cls, slow_path->GetEntryLabel());
3087 __ LoadConst32(out, 1);
3088 __ Bind(slow_path->GetExitLabel());
3089 }
3090
3091 __ Bind(&done);
3092}
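// Added note: the exact-check fast path above computes class equality without a
// branch:
//   xor    out, out, cls    // zero iff the two class pointers are identical
//   sltiu  out, out, 1      // out = (out == 0) ? 1 : 0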
3093
3094void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
3095 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3096 locations->SetOut(Location::ConstantLocation(constant));
3097}
3098
3099void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
3100 // Will be generated at use site.
3101}
3102
3103void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
3104 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3105 locations->SetOut(Location::ConstantLocation(constant));
3106}
3107
3108void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
3109 // Will be generated at use site.
3110}
3111
3112void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3113 // The trampoline uses the same calling convention as dex calling conventions,
3114 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
3115 // the method_idx.
3116 HandleInvoke(invoke);
3117}
3118
3119void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
3120 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
3121}
3122
3123void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
3124 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
3125 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
3126}
3127
3128void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
3129 HandleInvoke(invoke);
3130 // The register T0 is required to be used for the hidden argument in
3131 // art_quick_imt_conflict_trampoline, so add the hidden argument.
3132 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
3133}
3134
3135void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
3136 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
3137 GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003138 Location receiver = invoke->GetLocations()->InAt(0);
3139 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Andreas Gampe542451c2016-07-26 09:02:02 -07003140 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003141
3142 // Set the hidden argument.
3143 __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
3144 invoke->GetDexMethodIndex());
3145
3146 // temp = object->GetClass();
3147 if (receiver.IsStackSlot()) {
3148 __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
3149 __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
3150 } else {
3151 __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
3152 }
3153 codegen_->MaybeRecordImplicitNullCheck(invoke);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00003154 __ LoadFromOffset(kLoadDoubleword, temp, temp,
3155 mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
3156 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00003157 invoke->GetImtIndex(), kMips64PointerSize));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003158 // temp = temp->GetImtEntryAt(method_offset);
3159 __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
3160 // T9 = temp->GetEntryPoint();
3161 __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
3162 // T9();
3163 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003164 __ Nop();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003165 DCHECK(!codegen_->IsLeafMethod());
3166 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3167}
3168
3169void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07003170 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
3171 if (intrinsic.TryDispatch(invoke)) {
3172 return;
3173 }
3174
Alexey Frunze4dda3372015-06-01 18:31:49 -07003175 HandleInvoke(invoke);
3176}
3177
3178void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003179 // Explicit clinit checks triggered by static invokes must have been pruned by
3180 // art::PrepareForRegisterAllocation.
3181 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003182
Chris Larsen3039e382015-08-26 07:54:08 -07003183 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
3184 if (intrinsic.TryDispatch(invoke)) {
3185 return;
3186 }
3187
Alexey Frunze4dda3372015-06-01 18:31:49 -07003188 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003189}
3190
3191static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
3192  if (invoke->GetLocations()->Intrinsified()) {
3193    IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
3194    intrinsic.Dispatch(invoke);
3195    return true;
3196 }
3197 return false;
3198}
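// Added note: callers try the intrinsic expansion first and only fall back to the
// generic dispatch when it fails, following the pattern used by the invoke visitors
// later in this file (sketch):
//   if (TryGenerateIntrinsicCode(invoke, codegen_)) {
//     return;  // The intrinsic replaced the whole call sequence.
//   }
//   codegen_->GenerateStaticOrDirectCall(...);  // or GenerateVirtualCall(...)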
3199
3200HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
3201    HLoadString::LoadKind desired_string_load_kind) {
3202 if (kEmitCompilerReadBarrier) {
3203 UNIMPLEMENTED(FATAL) << "for read barrier";
3204 }
3205 bool fallback_load = false;
3206 switch (desired_string_load_kind) {
3207 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
3208 DCHECK(!GetCompilerOptions().GetCompilePic());
3209 break;
3210 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
3211 DCHECK(GetCompilerOptions().GetCompilePic());
3212 break;
3213 case HLoadString::LoadKind::kBootImageAddress:
3214 break;
3215 case HLoadString::LoadKind::kBssEntry:
3216 DCHECK(!Runtime::Current()->UseJitCompilation());
3217 break;
3218 case HLoadString::LoadKind::kDexCacheViaMethod:
3219 break;
3220 case HLoadString::LoadKind::kJitTableAddress:
3221 DCHECK(Runtime::Current()->UseJitCompilation());
3222 // TODO: implement.
3223 fallback_load = true;
3224 break;
3225 }
3226 if (fallback_load) {
3227 desired_string_load_kind = HLoadString::LoadKind::kDexCacheViaMethod;
3228 }
3229 return desired_string_load_kind;
3230}
3231
3232HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
3233    HLoadClass::LoadKind desired_class_load_kind) {
3234  if (kEmitCompilerReadBarrier) {
3235 UNIMPLEMENTED(FATAL) << "for read barrier";
3236 }
3237 bool fallback_load = false;
3238 switch (desired_class_load_kind) {
3239 case HLoadClass::LoadKind::kReferrersClass:
3240 break;
3241 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3242 DCHECK(!GetCompilerOptions().GetCompilePic());
3243 break;
3244 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
3245 DCHECK(GetCompilerOptions().GetCompilePic());
3246 break;
3247 case HLoadClass::LoadKind::kBootImageAddress:
3248 break;
3249 case HLoadClass::LoadKind::kJitTableAddress:
3250 DCHECK(Runtime::Current()->UseJitCompilation());
3251 // TODO: implement.
3252 fallback_load = true;
3253 break;
3254 case HLoadClass::LoadKind::kDexCachePcRelative:
3255 DCHECK(!Runtime::Current()->UseJitCompilation());
3256 break;
3257 case HLoadClass::LoadKind::kDexCacheViaMethod:
3258 break;
3259 }
3260 if (fallback_load) {
3261 desired_class_load_kind = HLoadClass::LoadKind::kDexCacheViaMethod;
3262 }
3263 return desired_class_load_kind;
3264}
3265
3266HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
3267    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
3268    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
3269  // On MIPS64 we support all dispatch types.
3270 return desired_dispatch_info;
3271}
3272
3273void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
3274  // All registers are assumed to be correctly set up per the calling convention.
3275  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
3276  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
3277 HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
3278
3279 // For better instruction scheduling we load the direct code pointer before the method pointer.
3280 switch (code_ptr_location) {
3281 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
3282 // T9 = invoke->GetDirectCodePtr();
3283 __ LoadLiteral(T9, kLoadDoubleword, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
3284 break;
3285 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3286 // T9 = code address from literal pool with link-time patch.
3287 __ LoadLiteral(T9,
3288 kLoadUnsignedWord,
3289 DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
3290 break;
3291 default:
3292 break;
3293 }
3294
3295 switch (method_load_kind) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01003296 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Vladimir Marko58155012015-08-19 12:49:41 +00003297 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01003298 uint32_t offset =
3299 GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Vladimir Marko58155012015-08-19 12:49:41 +00003300 __ LoadFromOffset(kLoadDoubleword,
3301 temp.AsRegister<GpuRegister>(),
3302 TR,
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01003303 offset);
Vladimir Marko58155012015-08-19 12:49:41 +00003304 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01003305 }
Vladimir Marko58155012015-08-19 12:49:41 +00003306 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00003307 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003308 break;
3309 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
Alexey Frunze19f6c692016-11-30 19:19:55 -08003310 __ LoadLiteral(temp.AsRegister<GpuRegister>(),
3311 kLoadDoubleword,
3312 DeduplicateUint64Literal(invoke->GetMethodAddress()));
Vladimir Marko58155012015-08-19 12:49:41 +00003313 break;
3314 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
Alexey Frunze19f6c692016-11-30 19:19:55 -08003315 __ LoadLiteral(temp.AsRegister<GpuRegister>(),
3316 kLoadUnsignedWord,
3317 DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
3318 break;
3319 case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
3320 uint32_t offset = invoke->GetDexCacheArrayOffset();
3321 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
3322 NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset);
3323 EmitPcRelativeAddressPlaceholderHigh(info, AT);
3324 __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
3325 break;
3326 }
Vladimir Marko58155012015-08-19 12:49:41 +00003327 case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
Vladimir Markoc53c0792015-11-19 15:48:33 +00003328 Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Vladimir Marko58155012015-08-19 12:49:41 +00003329 GpuRegister reg = temp.AsRegister<GpuRegister>();
3330 GpuRegister method_reg;
3331 if (current_method.IsRegister()) {
3332 method_reg = current_method.AsRegister<GpuRegister>();
3333 } else {
3334 // TODO: use the appropriate DCHECK() here if possible.
3335 // DCHECK(invoke->GetLocations()->Intrinsified());
3336 DCHECK(!current_method.IsValid());
3337 method_reg = reg;
3338 __ Ld(reg, SP, kCurrentMethodStackOffset);
3339 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003340
Vladimir Marko58155012015-08-19 12:49:41 +00003341 // temp = temp->dex_cache_resolved_methods_;
Vladimir Marko05792b92015-08-03 11:56:49 +01003342 __ LoadFromOffset(kLoadDoubleword,
Vladimir Marko58155012015-08-19 12:49:41 +00003343 reg,
3344 method_reg,
Vladimir Marko05792b92015-08-03 11:56:49 +01003345 ArtMethod::DexCacheResolvedMethodsOffset(kMips64PointerSize).Int32Value());
Vladimir Marko40ecb122016-04-06 17:33:41 +01003346 // temp = temp[index_in_cache];
3347 // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
3348 uint32_t index_in_cache = invoke->GetDexMethodIndex();
Vladimir Marko58155012015-08-19 12:49:41 +00003349 __ LoadFromOffset(kLoadDoubleword,
3350 reg,
3351 reg,
3352 CodeGenerator::GetCachePointerOffset(index_in_cache));
3353 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003354 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003355 }
3356
Alexey Frunze19f6c692016-11-30 19:19:55 -08003357 switch (code_ptr_location) {
Vladimir Marko58155012015-08-19 12:49:41 +00003358 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Alexey Frunze19f6c692016-11-30 19:19:55 -08003359 __ Balc(&frame_entry_label_);
Vladimir Marko58155012015-08-19 12:49:41 +00003360 break;
3361 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
Alexey Frunze19f6c692016-11-30 19:19:55 -08003362 case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
3363 // T9 prepared above for better instruction scheduling.
3364 // T9()
Vladimir Marko58155012015-08-19 12:49:41 +00003365 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003366 __ Nop();
Vladimir Marko58155012015-08-19 12:49:41 +00003367 break;
Alexey Frunze19f6c692016-11-30 19:19:55 -08003368 case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
3369 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
3370 NewPcRelativeCallPatch(*invoke->GetTargetMethod().dex_file,
3371 invoke->GetTargetMethod().dex_method_index);
3372 EmitPcRelativeAddressPlaceholderHigh(info, AT);
3373 __ Jialc(AT, /* placeholder */ 0x5678);
3374 break;
3375 }
Vladimir Marko58155012015-08-19 12:49:41 +00003376 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
3377 // T9 = callee_method->entry_point_from_quick_compiled_code_;
3378 __ LoadFromOffset(kLoadDoubleword,
3379 T9,
3380 callee_method.AsRegister<GpuRegister>(),
3381 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07003382 kMips64PointerSize).Int32Value());
Vladimir Marko58155012015-08-19 12:49:41 +00003383 // T9()
3384 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003385 __ Nop();
Vladimir Marko58155012015-08-19 12:49:41 +00003386 break;
3387 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07003388 DCHECK(!IsLeafMethod());
3389}
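// Added note (rough sketch, not a verbatim listing; offset names below are
// descriptive only): for the common kDexCacheViaMethod + kCallArtMethod
// combination, the code above reduces to a chain of dependent loads ending in an
// indirect call:
//   ld    temp, [method, dex_cache_resolved_methods_offset]
//   ld    temp, [temp, cache_pointer_offset(method_index)]   // target ArtMethod*
//   ld    T9,   [temp, quick_entry_point_offset]
//   jalr  T9
//   nop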
3390
3391void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00003392 // Explicit clinit checks triggered by static invokes must have been pruned by
3393 // art::PrepareForRegisterAllocation.
3394 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003395
3396 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3397 return;
3398 }
3399
3400 LocationSummary* locations = invoke->GetLocations();
3401 codegen_->GenerateStaticOrDirectCall(invoke,
3402 locations->HasTemps()
3403 ? locations->GetTemp(0)
3404 : Location::NoLocation());
3405 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3406}
3407
3408void CodeGeneratorMIPS64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_location) {
3409  // Use the calling convention instead of the location of the receiver, as
3410 // intrinsics may have put the receiver in a different register. In the intrinsics
3411 // slow path, the arguments have been moved to the right place, so here we are
3412 // guaranteed that the receiver is the first register of the calling convention.
3413 InvokeDexCallingConvention calling_convention;
3414 GpuRegister receiver = calling_convention.GetRegisterAt(0);
3415
Alexey Frunze53afca12015-11-05 16:34:23 -08003416 GpuRegister temp = temp_location.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003417 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
3418 invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
3419 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Andreas Gampe542451c2016-07-26 09:02:02 -07003420 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003421
3422 // temp = object->GetClass();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00003423 __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
Alexey Frunze53afca12015-11-05 16:34:23 -08003424 MaybeRecordImplicitNullCheck(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003425 // temp = temp->GetMethodAt(method_offset);
3426 __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
3427 // T9 = temp->GetEntryPoint();
3428 __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
3429 // T9();
3430 __ Jalr(T9);
3431  __ Nop();
3432}
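// Added note: virtual dispatch is three dependent loads followed by the call: the
// receiver's class (a 32-bit compressed reference, hence kLoadUnsignedWord), the
// ArtMethod* from the class' embedded vtable at the statically known index, and
// that method's quick entry point, ending in Jalr(T9) with the trailing Nop
// presumably filling the branch delay slot.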
3433
3434void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
3435 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
3436 return;
3437 }
3438
3439 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003440 DCHECK(!codegen_->IsLeafMethod());
3441 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
3442}
3443
3444void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
3445  if (cls->NeedsAccessCheck()) {
3446 InvokeRuntimeCallingConvention calling_convention;
3447 CodeGenerator::CreateLoadClassLocationSummary(
3448 cls,
3449 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
3450 calling_convention.GetReturnLocation(Primitive::kPrimNot),
3451 /* code_generator_supports_read_barrier */ false);
3452 return;
3453 }
3454
3455 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
3456 ? LocationSummary::kCallOnSlowPath
3457 : LocationSummary::kNoCall;
3458 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3459 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
3460 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
3461 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
3462 locations->SetInAt(0, Location::RequiresRegister());
3463 }
3464 locations->SetOut(Location::RequiresRegister());
3465}
3466
3467void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) {
3468 LocationSummary* locations = cls->GetLocations();
3469  if (cls->NeedsAccessCheck()) {
3470    codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
3471    codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
3472    CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
3473    return;
3474 }
3475
3476  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
3477 Location out_loc = locations->Out();
3478 GpuRegister out = out_loc.AsRegister<GpuRegister>();
3479 GpuRegister current_method_reg = ZERO;
3480 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
3481 load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
3482 current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
3483 }
3484
3485 bool generate_null_check = false;
3486 switch (load_kind) {
3487 case HLoadClass::LoadKind::kReferrersClass:
3488 DCHECK(!cls->CanCallRuntime());
3489 DCHECK(!cls->MustGenerateClinitCheck());
3490 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
3491 GenerateGcRootFieldLoad(cls,
3492 out_loc,
3493 current_method_reg,
3494 ArtMethod::DeclaringClassOffset().Int32Value());
3495 break;
3496 case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
3497 DCHECK(!kEmitCompilerReadBarrier);
3498 __ LoadLiteral(out,
3499 kLoadUnsignedWord,
3500 codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
3501 cls->GetTypeIndex()));
3502 break;
3503 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
3504 DCHECK(!kEmitCompilerReadBarrier);
3505 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
3506 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
3507 codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
3508 __ Daddiu(out, AT, /* placeholder */ 0x5678);
3509 break;
3510 }
3511 case HLoadClass::LoadKind::kBootImageAddress: {
3512 DCHECK(!kEmitCompilerReadBarrier);
3513 DCHECK_NE(cls->GetAddress(), 0u);
3514 uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
3515 __ LoadLiteral(out,
3516 kLoadUnsignedWord,
3517 codegen_->DeduplicateBootImageAddressLiteral(address));
3518 break;
3519 }
3520 case HLoadClass::LoadKind::kJitTableAddress: {
3521 LOG(FATAL) << "Unimplemented";
3522 break;
3523 }
3524 case HLoadClass::LoadKind::kDexCachePcRelative: {
3525 uint32_t element_offset = cls->GetDexCacheElementOffset();
3526 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
3527 codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), element_offset);
3528 codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
3529 // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
3530 GenerateGcRootFieldLoad(cls, out_loc, AT, /* placeholder */ 0x5678);
3531 generate_null_check = !cls->IsInDexCache();
3532 break;
3533 }
3534 case HLoadClass::LoadKind::kDexCacheViaMethod: {
3535 // /* GcRoot<mirror::Class>[] */ out =
3536 // current_method.ptr_sized_fields_->dex_cache_resolved_types_
3537 __ LoadFromOffset(kLoadDoubleword,
3538 out,
3539 current_method_reg,
3540 ArtMethod::DexCacheResolvedTypesOffset(kMips64PointerSize).Int32Value());
3541 // /* GcRoot<mirror::Class> */ out = out[type_index]
3542 size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_);
3543 GenerateGcRootFieldLoad(cls, out_loc, out, offset);
3544 generate_null_check = !cls->IsInDexCache();
3545 }
3546 }
3547
3548 if (generate_null_check || cls->MustGenerateClinitCheck()) {
3549 DCHECK(cls->CanCallRuntime());
3550 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
3551 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
3552 codegen_->AddSlowPath(slow_path);
3553 if (generate_null_check) {
3554 __ Beqzc(out, slow_path->GetEntryLabel());
3555 }
3556 if (cls->MustGenerateClinitCheck()) {
3557 GenerateClassInitializationCheck(slow_path, out);
3558 } else {
3559 __ Bind(slow_path->GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003560 }
3561 }
3562}
3563
3564static int32_t GetExceptionTlsOffset() {
3565  return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
3566}
3567
3568void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
3569 LocationSummary* locations =
3570 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3571 locations->SetOut(Location::RequiresRegister());
3572}
3573
3574void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
3575 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
3576  __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
3577}
3578
3579void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
3580 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
3581}
3582
3583void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
3584 __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
3585}
3586
3587void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
3588  HLoadString::LoadKind load_kind = load->GetLoadKind();
3589 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
3590  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
3591  if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
3592 InvokeRuntimeCallingConvention calling_convention;
3593 locations->SetOut(calling_convention.GetReturnLocation(load->GetType()));
3594 } else {
3595 locations->SetOut(Location::RequiresRegister());
3596 }
3597}
3598
3599void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) {
3600  HLoadString::LoadKind load_kind = load->GetLoadKind();
3601 LocationSummary* locations = load->GetLocations();
3602 Location out_loc = locations->Out();
3603 GpuRegister out = out_loc.AsRegister<GpuRegister>();
3604
3605 switch (load_kind) {
3606 case HLoadString::LoadKind::kBootImageLinkTimeAddress:
3607 __ LoadLiteral(out,
3608 kLoadUnsignedWord,
3609 codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
3610 load->GetStringIndex()));
3611 return; // No dex cache slow path.
3612 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
3613 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
3614 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
3615 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex().index_);
3616 codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
3617 __ Daddiu(out, AT, /* placeholder */ 0x5678);
3618 return; // No dex cache slow path.
3619 }
3620 case HLoadString::LoadKind::kBootImageAddress: {
3621 DCHECK_NE(load->GetAddress(), 0u);
3622 uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
3623 __ LoadLiteral(out,
3624 kLoadUnsignedWord,
3625 codegen_->DeduplicateBootImageAddressLiteral(address));
3626 return; // No dex cache slow path.
3627 }
3628 case HLoadString::LoadKind::kBssEntry: {
3629 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
3630 CodeGeneratorMIPS64::PcRelativePatchInfo* info =
3631 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex().index_);
3632 codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
3633 __ Lwu(out, AT, /* placeholder */ 0x5678);
3634 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathMIPS64(load);
3635 codegen_->AddSlowPath(slow_path);
3636 __ Beqzc(out, slow_path->GetEntryLabel());
3637 __ Bind(slow_path->GetExitLabel());
3638 return;
3639 }
3640 default:
3641 break;
3642 }
3643
3644  // TODO: Re-add the compiler code to do string dex cache lookup again.
3645  DCHECK(load_kind == HLoadString::LoadKind::kDexCacheViaMethod);
3646 InvokeRuntimeCallingConvention calling_convention;
3647 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
3648 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
3649 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
3650}
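// Added note: in the kBssEntry case above the string is read through a PC-relative
// .bss slot (the auipc/lwu pair with the 0x1234/0x5678 placeholders patched later);
// a zero result means the slot is still empty and the slow path calls the runtime
// to resolve the string.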
3651
3652void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
3653 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
3654 locations->SetOut(Location::ConstantLocation(constant));
3655}
3656
3657void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
3658 // Will be generated at use site.
3659}
3660
3661void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
3662 LocationSummary* locations =
3663      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
3664  InvokeRuntimeCallingConvention calling_convention;
3665 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3666}
3667
3668void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
3669  codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
3670                          instruction,
3671                          instruction->GetDexPc());
3672  if (instruction->IsEnter()) {
3673 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
3674 } else {
3675 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
3676 }
3677}
3678
3679void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
3680 LocationSummary* locations =
3681 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
3682 switch (mul->GetResultType()) {
3683 case Primitive::kPrimInt:
3684 case Primitive::kPrimLong:
3685 locations->SetInAt(0, Location::RequiresRegister());
3686 locations->SetInAt(1, Location::RequiresRegister());
3687 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3688 break;
3689
3690 case Primitive::kPrimFloat:
3691 case Primitive::kPrimDouble:
3692 locations->SetInAt(0, Location::RequiresFpuRegister());
3693 locations->SetInAt(1, Location::RequiresFpuRegister());
3694 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3695 break;
3696
3697 default:
3698 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
3699 }
3700}
3701
3702void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
3703 Primitive::Type type = instruction->GetType();
3704 LocationSummary* locations = instruction->GetLocations();
3705
3706 switch (type) {
3707 case Primitive::kPrimInt:
3708 case Primitive::kPrimLong: {
3709 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3710 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
3711 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
3712 if (type == Primitive::kPrimInt)
3713 __ MulR6(dst, lhs, rhs);
3714 else
3715 __ Dmul(dst, lhs, rhs);
3716 break;
3717 }
3718 case Primitive::kPrimFloat:
3719 case Primitive::kPrimDouble: {
3720 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3721 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3722 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
3723 if (type == Primitive::kPrimFloat)
3724 __ MulS(dst, lhs, rhs);
3725 else
3726 __ MulD(dst, lhs, rhs);
3727 break;
3728 }
3729 default:
3730 LOG(FATAL) << "Unexpected mul type " << type;
3731 }
3732}
3733
3734void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
3735 LocationSummary* locations =
3736 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
3737 switch (neg->GetResultType()) {
3738 case Primitive::kPrimInt:
3739 case Primitive::kPrimLong:
3740 locations->SetInAt(0, Location::RequiresRegister());
3741 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3742 break;
3743
3744 case Primitive::kPrimFloat:
3745 case Primitive::kPrimDouble:
3746 locations->SetInAt(0, Location::RequiresFpuRegister());
3747 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3748 break;
3749
3750 default:
3751 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
3752 }
3753}
3754
3755void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
3756 Primitive::Type type = instruction->GetType();
3757 LocationSummary* locations = instruction->GetLocations();
3758
3759 switch (type) {
3760 case Primitive::kPrimInt:
3761 case Primitive::kPrimLong: {
3762 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3763 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
3764 if (type == Primitive::kPrimInt)
3765 __ Subu(dst, ZERO, src);
3766 else
3767 __ Dsubu(dst, ZERO, src);
3768 break;
3769 }
3770 case Primitive::kPrimFloat:
3771 case Primitive::kPrimDouble: {
3772 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3773 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
3774 if (type == Primitive::kPrimFloat)
3775 __ NegS(dst, src);
3776 else
3777 __ NegD(dst, src);
3778 break;
3779 }
3780 default:
3781 LOG(FATAL) << "Unexpected neg type " << type;
3782 }
3783}
3784
3785void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
3786 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003787 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003788 InvokeRuntimeCallingConvention calling_convention;
3789 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3790 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
3791 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3792 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
3793}
3794
3795void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
3796 LocationSummary* locations = instruction->GetLocations();
3797  // Move a uint16_t value to a register.
3798  __ LoadConst32(locations->GetTemp(0).AsRegister<GpuRegister>(),
3799 instruction->GetTypeIndex().index_);
Serban Constantinescufc734082016-07-19 17:18:07 +01003800 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003801 CheckEntrypointTypes<kQuickAllocArrayWithAccessCheck, void*, uint32_t, int32_t, ArtMethod*>();
3802}
3803
3804void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
3805 LocationSummary* locations =
3806      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
3807  InvokeRuntimeCallingConvention calling_convention;
3808  if (instruction->IsStringAlloc()) {
3809 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
3810 } else {
3811 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3812 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3813 }
3814  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
3815}
3816
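// Rough sketch of the two paths taken below: a String allocation loads the StringFactory
// NewEmptyString ArtMethod* from the current thread (TR) and calls its quick-compiled code
// through T9, while every other allocation is routed to the runtime entrypoint recorded on
// the HNewInstance node.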
3817void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
3818  if (instruction->IsStringAlloc()) {
3819 // String is allocated through StringFactory. Call NewEmptyString entry point.
3820 GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
3821    MemberOffset code_offset =
3822        ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
3823    __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
3824 __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
3825 __ Jalr(T9);
3826 __ Nop();
3827 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3828 } else {
3829    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
3830    CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
3831  }
3832}
3833
3834void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
3835 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3836 locations->SetInAt(0, Location::RequiresRegister());
3837 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3838}
3839
3840void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
3841 Primitive::Type type = instruction->GetType();
3842 LocationSummary* locations = instruction->GetLocations();
3843
3844 switch (type) {
3845 case Primitive::kPrimInt:
3846 case Primitive::kPrimLong: {
3847 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3848 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
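      // MIPS64 has no architectural "not" instruction; nor(src, ZERO) computes ~src.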
3849 __ Nor(dst, src, ZERO);
3850 break;
3851 }
3852
3853 default:
3854 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
3855 }
3856}
3857
3858void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
3859 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3860 locations->SetInAt(0, Location::RequiresRegister());
3861 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3862}
3863
3864void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
3865 LocationSummary* locations = instruction->GetLocations();
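  // Booleans are materialized as 0 or 1, so flipping bit 0 with xori(src, 1) implements the
  // logical not.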
3866 __ Xori(locations->Out().AsRegister<GpuRegister>(),
3867 locations->InAt(0).AsRegister<GpuRegister>(),
3868 1);
3869}
3870
3871void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
3872  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
3873  locations->SetInAt(0, Location::RequiresRegister());
3874}
3875
3876void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
3877  if (CanMoveNullCheckToUser(instruction)) {
3878    return;
3879 }
3880 Location obj = instruction->GetLocations()->InAt(0);
3881
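  // Implicit null check: load from the object into ZERO. A null reference faults here, and the
  // runtime's fault handler is expected to turn the fault at this recorded PC into a
  // NullPointerException.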
3882 __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
3883  RecordPcInfo(instruction, instruction->GetDexPc());
3884}
3885
3886void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
3887  SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS64(instruction);
3888  AddSlowPath(slow_path);
3889
3890 Location obj = instruction->GetLocations()->InAt(0);
3891
3892 __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
3893}
3894
3895void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
3896  codegen_->GenerateNullCheck(instruction);
3897}
3898
3899void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
3900 HandleBinaryOp(instruction);
3901}
3902
3903void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
3904 HandleBinaryOp(instruction);
3905}
3906
3907void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
3908 LOG(FATAL) << "Unreachable";
3909}
3910
3911void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
3912 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
3913}
3914
3915void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
3916 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3917 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
3918 if (location.IsStackSlot()) {
3919 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3920 } else if (location.IsDoubleStackSlot()) {
3921 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
3922 }
3923 locations->SetOut(location);
3924}
3925
3926void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
3927 ATTRIBUTE_UNUSED) {
3928 // Nothing to do, the parameter is already at its location.
3929}
3930
3931void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
3932 LocationSummary* locations =
3933 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3934 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
3935}
3936
3937void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
3938 ATTRIBUTE_UNUSED) {
3939 // Nothing to do, the method is already at its location.
3940}
3941
3942void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
3943 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3944  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
3945    locations->SetInAt(i, Location::Any());
3946 }
3947 locations->SetOut(Location::Any());
3948}
3949
3950void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
3951 LOG(FATAL) << "Unreachable";
3952}
3953
3954void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
3955 Primitive::Type type = rem->GetResultType();
3956 LocationSummary::CallKind call_kind =
3957      Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
3958                                           : LocationSummary::kNoCall;
3959  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
3960
3961 switch (type) {
3962 case Primitive::kPrimInt:
3963 case Primitive::kPrimLong:
3964 locations->SetInAt(0, Location::RequiresRegister());
3965      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
3966      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3967 break;
3968
3969 case Primitive::kPrimFloat:
3970 case Primitive::kPrimDouble: {
3971 InvokeRuntimeCallingConvention calling_convention;
3972 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
3973 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
3974 locations->SetOut(calling_convention.GetReturnLocation(type));
3975 break;
3976 }
3977
3978 default:
3979 LOG(FATAL) << "Unexpected rem type " << type;
3980 }
3981}
3982
3983void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
3984 Primitive::Type type = instruction->GetType();
3985
3986 switch (type) {
3987 case Primitive::kPrimInt:
3988    case Primitive::kPrimLong:
3989      GenerateDivRemIntegral(instruction);
3990      break;
3991
3992 case Primitive::kPrimFloat:
3993 case Primitive::kPrimDouble: {
3994      QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
3995      codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
3996      if (type == Primitive::kPrimFloat) {
3997 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
3998 } else {
3999 CheckEntrypointTypes<kQuickFmod, double, double, double>();
4000 }
4001      break;
4002 }
4003 default:
4004 LOG(FATAL) << "Unexpected rem type " << type;
4005 }
4006}
4007
4008void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4009 memory_barrier->SetLocations(nullptr);
4010}
4011
4012void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
4013 GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
4014}
4015
4016void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
4017 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
4018 Primitive::Type return_type = ret->InputAt(0)->GetType();
4019 locations->SetInAt(0, Mips64ReturnLocation(return_type));
4020}
4021
4022void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
4023 codegen_->GenerateFrameExit();
4024}
4025
4026void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
4027 ret->SetLocations(nullptr);
4028}
4029
4030void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
4031 codegen_->GenerateFrameExit();
4032}
4033
4034void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
4035  HandleShift(ror);
4036}
4037
4038void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
4039  HandleShift(ror);
4040}
4041
4042void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
4043 HandleShift(shl);
4044}
4045
4046void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
4047 HandleShift(shl);
4048}
4049
4050void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
4051 HandleShift(shr);
4052}
4053
4054void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
4055 HandleShift(shr);
4056}
4057
4058void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
4059 HandleBinaryOp(instruction);
4060}
4061
4062void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
4063 HandleBinaryOp(instruction);
4064}
4065
4066void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4067 HandleFieldGet(instruction, instruction->GetFieldInfo());
4068}
4069
4070void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
4071 HandleFieldGet(instruction, instruction->GetFieldInfo());
4072}
4073
4074void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4075 HandleFieldSet(instruction, instruction->GetFieldInfo());
4076}
4077
4078void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
4079  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
4080}
4081
4082void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
4083 HUnresolvedInstanceFieldGet* instruction) {
4084 FieldAccessCallingConventionMIPS64 calling_convention;
4085 codegen_->CreateUnresolvedFieldLocationSummary(
4086 instruction, instruction->GetFieldType(), calling_convention);
4087}
4088
4089void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
4090 HUnresolvedInstanceFieldGet* instruction) {
4091 FieldAccessCallingConventionMIPS64 calling_convention;
4092 codegen_->GenerateUnresolvedFieldAccess(instruction,
4093 instruction->GetFieldType(),
4094 instruction->GetFieldIndex(),
4095 instruction->GetDexPc(),
4096 calling_convention);
4097}
4098
4099void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
4100 HUnresolvedInstanceFieldSet* instruction) {
4101 FieldAccessCallingConventionMIPS64 calling_convention;
4102 codegen_->CreateUnresolvedFieldLocationSummary(
4103 instruction, instruction->GetFieldType(), calling_convention);
4104}
4105
4106void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
4107 HUnresolvedInstanceFieldSet* instruction) {
4108 FieldAccessCallingConventionMIPS64 calling_convention;
4109 codegen_->GenerateUnresolvedFieldAccess(instruction,
4110 instruction->GetFieldType(),
4111 instruction->GetFieldIndex(),
4112 instruction->GetDexPc(),
4113 calling_convention);
4114}
4115
4116void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
4117 HUnresolvedStaticFieldGet* instruction) {
4118 FieldAccessCallingConventionMIPS64 calling_convention;
4119 codegen_->CreateUnresolvedFieldLocationSummary(
4120 instruction, instruction->GetFieldType(), calling_convention);
4121}
4122
4123void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
4124 HUnresolvedStaticFieldGet* instruction) {
4125 FieldAccessCallingConventionMIPS64 calling_convention;
4126 codegen_->GenerateUnresolvedFieldAccess(instruction,
4127 instruction->GetFieldType(),
4128 instruction->GetFieldIndex(),
4129 instruction->GetDexPc(),
4130 calling_convention);
4131}
4132
4133void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
4134 HUnresolvedStaticFieldSet* instruction) {
4135 FieldAccessCallingConventionMIPS64 calling_convention;
4136 codegen_->CreateUnresolvedFieldLocationSummary(
4137 instruction, instruction->GetFieldType(), calling_convention);
4138}
4139
4140void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
4141 HUnresolvedStaticFieldSet* instruction) {
4142 FieldAccessCallingConventionMIPS64 calling_convention;
4143 codegen_->GenerateUnresolvedFieldAccess(instruction,
4144 instruction->GetFieldType(),
4145 instruction->GetFieldIndex(),
4146 instruction->GetDexPc(),
4147 calling_convention);
4148}
4149
4150void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
4151  LocationSummary* locations =
4152      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
4153  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
4154}
4155
4156void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
4157 HBasicBlock* block = instruction->GetBlock();
4158 if (block->GetLoopInformation() != nullptr) {
4159 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
4160 // The back edge will generate the suspend check.
4161 return;
4162 }
4163 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
4164 // The goto will generate the suspend check.
4165 return;
4166 }
4167 GenerateSuspendCheck(instruction, nullptr);
4168}
4169
4170void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
4171  LocationSummary* locations =
4172      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
4173  InvokeRuntimeCallingConvention calling_convention;
4174 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4175}
4176
4177void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
4178  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
4179  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
4180}
4181
4182void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
4183 Primitive::Type input_type = conversion->GetInputType();
4184 Primitive::Type result_type = conversion->GetResultType();
4185 DCHECK_NE(input_type, result_type);
4186
4187 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
4188 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
4189 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
4190 }
4191
4192  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion);
4193
4194 if (Primitive::IsFloatingPointType(input_type)) {
4195 locations->SetInAt(0, Location::RequiresFpuRegister());
4196 } else {
4197 locations->SetInAt(0, Location::RequiresRegister());
4198  }
4199
4200  if (Primitive::IsFloatingPointType(result_type)) {
4201    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
4202  } else {
4203    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
4204  }
4205}
4206
4207void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
4208 LocationSummary* locations = conversion->GetLocations();
4209 Primitive::Type result_type = conversion->GetResultType();
4210 Primitive::Type input_type = conversion->GetInputType();
4211
4212 DCHECK_NE(input_type, result_type);
4213
4214 if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
4215 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
4216 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
4217
4218 switch (result_type) {
4219 case Primitive::kPrimChar:
4220 __ Andi(dst, src, 0xFFFF);
4221 break;
4222 case Primitive::kPrimByte:
4223        if (input_type == Primitive::kPrimLong) {
4224 // Type conversion from long to types narrower than int is a result of code
4225 // transformations. To avoid unpredictable results for SEB and SEH, we first
4226 // need to sign-extend the low 32-bit value into bits 32 through 63.
4227 __ Sll(dst, src, 0);
4228 __ Seb(dst, dst);
4229 } else {
4230 __ Seb(dst, src);
4231 }
4232        break;
4233      case Primitive::kPrimShort:
4234        if (input_type == Primitive::kPrimLong) {
4235 // Type conversion from long to types narrower than int is a result of code
4236 // transformations. To avoid unpredictable results for SEB and SEH, we first
4237 // need to sign-extend the low 32-bit value into bits 32 through 63.
4238 __ Sll(dst, src, 0);
4239 __ Seh(dst, dst);
4240 } else {
4241 __ Seh(dst, src);
4242 }
4243        break;
4244 case Primitive::kPrimInt:
4245 case Primitive::kPrimLong:
4246 // Sign-extend 32-bit int into bits 32 through 63 for
4247 // int-to-long and long-to-int conversions
4248 __ Sll(dst, src, 0);
4249 break;
4250
4251 default:
4252 LOG(FATAL) << "Unexpected type conversion from " << input_type
4253 << " to " << result_type;
4254 }
4255 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
4256    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
4257 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
4258 if (input_type == Primitive::kPrimLong) {
4259 __ Dmtc1(src, FTMP);
4260 if (result_type == Primitive::kPrimFloat) {
4261 __ Cvtsl(dst, FTMP);
4262 } else {
4263 __ Cvtdl(dst, FTMP);
4264 }
4265 } else {
4266      __ Mtc1(src, FTMP);
4267 if (result_type == Primitive::kPrimFloat) {
4268 __ Cvtsw(dst, FTMP);
4269 } else {
4270 __ Cvtdw(dst, FTMP);
4271 }
4272    }
4273 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
4274 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
4275    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
4276 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
4277 Mips64Label truncate;
4278 Mips64Label done;
4279
4280 // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
4281    // value when the input is either a NaN or outside of the range of the output type
4282 // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
4283 // the same result.
4284 //
4285 // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
4286 // value of the output type if the input is outside of the range after the truncation or
4287 // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
4288 // results. This matches the desired float/double-to-int/long conversion exactly.
4289 //
4290 // So, NAN2008 affects handling of negative values and NaNs by the truncate instruction.
4291 //
4292 // The following code supports both NAN2008=0 and NAN2008=1 behaviors of the truncate
4293 // instruction, the reason being that the emulator implements NAN2008=0 on MIPS64R6,
4294 // even though it must be NAN2008=1 on R6.
4295 //
4296 // The code takes care of the different behaviors by first comparing the input to the
4297    // minimum output value (-2**63 for truncating to long, -2**31 for truncating to int).
4298    // If the input is greater than or equal to the minimum, it proceeds to the truncate
4299 // instruction, which will handle such an input the same way irrespective of NAN2008.
4300 // Otherwise the input is compared to itself to determine whether it is a NaN or not
4301 // in order to return either zero or the minimum value.
4302 //
4303 // TODO: simplify this when the emulator correctly implements NAN2008=1 behavior of the
4304 // truncate instruction for MIPS64R6.
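    // Schematically, for the float-to-int case (a sketch only; the other cases differ in the
    // width of the constants and in the compare/trunc variants used):
    //   LoadConst32 TMP, bits((float)INT32_MIN)
    //   Mtc1        TMP, FTMP
    //   CmpLeS      FTMP, FTMP, src      // all-ones if INT32_MIN <= src
    //   Bc1nez      FTMP, truncate
    //   CmpEqS      FTMP, src, src       // all-ones unless src is a NaN
    //   LoadConst32 dst, INT32_MIN
    //   Mfc1        TMP, FTMP
    //   And         dst, dst, TMP        // NaN -> 0, otherwise INT32_MIN
    //   Bc          done
    // truncate:
    //   TruncWS     FTMP, src
    //   Mfc1        dst, FTMP
    // done: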
4305 if (input_type == Primitive::kPrimFloat) {
4306 uint32_t min_val = (result_type == Primitive::kPrimLong)
4307 ? bit_cast<uint32_t, float>(std::numeric_limits<int64_t>::min())
4308 : bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
4309 __ LoadConst32(TMP, min_val);
4310 __ Mtc1(TMP, FTMP);
4311 __ CmpLeS(FTMP, FTMP, src);
4312    } else {
4313      uint64_t min_val = (result_type == Primitive::kPrimLong)
4314 ? bit_cast<uint64_t, double>(std::numeric_limits<int64_t>::min())
4315 : bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
4316 __ LoadConst64(TMP, min_val);
4317 __ Dmtc1(TMP, FTMP);
4318 __ CmpLeD(FTMP, FTMP, src);
4319    }
4320
4321 __ Bc1nez(FTMP, &truncate);
4322
4323 if (input_type == Primitive::kPrimFloat) {
4324 __ CmpEqS(FTMP, src, src);
4325 } else {
4326 __ CmpEqD(FTMP, src, src);
4327 }
4328 if (result_type == Primitive::kPrimLong) {
4329 __ LoadConst64(dst, std::numeric_limits<int64_t>::min());
4330 } else {
4331 __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
4332 }
4333 __ Mfc1(TMP, FTMP);
4334 __ And(dst, dst, TMP);
4335
4336 __ Bc(&done);
4337
4338 __ Bind(&truncate);
4339
4340 if (result_type == Primitive::kPrimLong) {
4341      if (input_type == Primitive::kPrimFloat) {
4342        __ TruncLS(FTMP, src);
4343      } else {
4344        __ TruncLD(FTMP, src);
4345      }
4346      __ Dmfc1(dst, FTMP);
4347    } else {
4348      if (input_type == Primitive::kPrimFloat) {
4349        __ TruncWS(FTMP, src);
4350      } else {
4351        __ TruncWD(FTMP, src);
4352      }
4353      __ Mfc1(dst, FTMP);
4354    }
4355
4356    __ Bind(&done);
4357  } else if (Primitive::IsFloatingPointType(result_type) &&
4358 Primitive::IsFloatingPointType(input_type)) {
4359 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
4360 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
4361 if (result_type == Primitive::kPrimFloat) {
4362 __ Cvtsd(dst, src);
4363 } else {
4364 __ Cvtds(dst, src);
4365 }
4366 } else {
4367 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
4368 << " to " << result_type;
4369 }
4370}
4371
4372void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
4373 HandleShift(ushr);
4374}
4375
4376void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
4377 HandleShift(ushr);
4378}
4379
4380void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
4381 HandleBinaryOp(instruction);
4382}
4383
4384void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
4385 HandleBinaryOp(instruction);
4386}
4387
4388void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
4389 // Nothing to do, this should be removed during prepare for register allocator.
4390 LOG(FATAL) << "Unreachable";
4391}
4392
4393void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
4394 // Nothing to do, this should be removed during prepare for register allocator.
4395 LOG(FATAL) << "Unreachable";
4396}
4397
4398void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
4399  HandleCondition(comp);
4400}
4401
4402void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
4403  HandleCondition(comp);
4404}
4405
4406void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
4407  HandleCondition(comp);
4408}
4409
4410void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
4411  HandleCondition(comp);
4412}
4413
4414void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
4415  HandleCondition(comp);
4416}
4417
4418void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
4419  HandleCondition(comp);
4420}
4421
4422void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
4423  HandleCondition(comp);
4424}
4425
4426void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
4427  HandleCondition(comp);
4428}
4429
4430void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
4431  HandleCondition(comp);
4432}
4433
4434void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
4435  HandleCondition(comp);
4436}
4437
4438void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
4439  HandleCondition(comp);
4440}
4441
4442void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
4443  HandleCondition(comp);
4444}
4445
4446void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
4447  HandleCondition(comp);
4448}
4449
4450void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
4451  HandleCondition(comp);
4452}
4453
4454void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
4455  HandleCondition(comp);
4456}
4457
4458void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
4459  HandleCondition(comp);
4460}
4461
4462void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
4463  HandleCondition(comp);
4464}
4465
4466void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
4467  HandleCondition(comp);
4468}
4469
4470void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
4471  HandleCondition(comp);
4472}
4473
4474void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
4475  HandleCondition(comp);
4476}
4477
4478// Simple implementation of packed switch - generate cascaded compare/jumps.
4479void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4480 LocationSummary* locations =
4481 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
4482 locations->SetInAt(0, Location::RequiresRegister());
4483}
4484
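// The generator below emits a cascade of compare/branches. As a sketch for a hypothetical
// switch with lower_bound == 10 and three entries (label names are illustrative only):
//   Addiu32 TMP, value, -10
//   Bltzc   TMP, default    // value < 10
//   Beqzc   TMP, case_0     // value == 10
//   Addiu   TMP, TMP, -2
//   Bltzc   TMP, case_1     // value == 11
//   Beqzc   TMP, case_2     // value == 12
//   Bc      default         // omitted when the default block is the fall-through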
4485void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
4486 int32_t lower_bound,
4487 uint32_t num_entries,
4488 HBasicBlock* switch_block,
4489 HBasicBlock* default_block) {
4490  // Create a set of compare/jumps.
4491  GpuRegister temp_reg = TMP;
4492  __ Addiu32(temp_reg, value_reg, -lower_bound);
4493  // Jump to the default block if the index is negative.
4494  // Note: we don't separately check the case where the index is positive but value < lower_bound,
4495  // because in that case index >= num_entries must be true, which saves one branch instruction.
4496 __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));
4497
4498  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
4499  // Jump to successors[0] if value == lower_bound.
4500 __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
4501 int32_t last_index = 0;
4502 for (; num_entries - last_index > 2; last_index += 2) {
4503 __ Addiu(temp_reg, temp_reg, -2);
4504 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
4505 __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
4506 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
4507 __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
4508 }
4509 if (num_entries - last_index == 2) {
4510 // The last missing case_value.
4511 __ Addiu(temp_reg, temp_reg, -1);
4512 __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
4513  }
4514
4515 // And the default for any other value.
4516  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
4517    __ Bc(codegen_->GetLabelOf(default_block));
4518  }
4519}
4520
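// For larger switches, GenTableBasedPackedSwitch below dispatches through a jump table.
// Roughly: (value - lower_bound) is range-checked against num_entries, scaled by 4 and used
// to load a 32-bit offset from a PC-relative table of label offsets; the offset is added
// back to the table address and the result is jumped to via TMP.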
4521void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
4522 int32_t lower_bound,
4523 uint32_t num_entries,
4524 HBasicBlock* switch_block,
4525 HBasicBlock* default_block) {
4526 // Create a jump table.
4527 std::vector<Mips64Label*> labels(num_entries);
4528 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
4529 for (uint32_t i = 0; i < num_entries; i++) {
4530 labels[i] = codegen_->GetLabelOf(successors[i]);
4531 }
4532 JumpTable* table = __ CreateJumpTable(std::move(labels));
4533
4534 // Is the value in range?
4535 __ Addiu32(TMP, value_reg, -lower_bound);
4536 __ LoadConst32(AT, num_entries);
4537 __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));
4538
4539 // We are in the range of the table.
4540 // Load the target address from the jump table, indexing by the value.
4541 __ LoadLabelAddress(AT, table->GetLabel());
4542 __ Sll(TMP, TMP, 2);
4543 __ Daddu(TMP, TMP, AT);
4544 __ Lw(TMP, TMP, 0);
4545 // Compute the absolute target address by adding the table start address
4546 // (the table contains offsets to targets relative to its start).
4547 __ Daddu(TMP, TMP, AT);
4548 // And jump.
4549 __ Jr(TMP);
4550 __ Nop();
4551}
4552
4553void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
4554 int32_t lower_bound = switch_instr->GetStartValue();
4555 uint32_t num_entries = switch_instr->GetNumEntries();
4556 LocationSummary* locations = switch_instr->GetLocations();
4557 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
4558 HBasicBlock* switch_block = switch_instr->GetBlock();
4559 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
4560
4561 if (num_entries > kPackedSwitchJumpTableThreshold) {
4562 GenTableBasedPackedSwitch(value_reg,
4563 lower_bound,
4564 num_entries,
4565 switch_block,
4566 default_block);
4567 } else {
4568 GenPackedSwitchWithCompares(value_reg,
4569 lower_bound,
4570 num_entries,
4571 switch_block,
4572 default_block);
4573 }
4574}
4575
4576void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet*) {
4577 UNIMPLEMENTED(FATAL) << "ClassTableGet is unimplemented on mips64";
4578}
4579
4580void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet*) {
4581 UNIMPLEMENTED(FATAL) << "ClassTableGet is unimplemented on mips64";
4582}
4583
4584} // namespace mips64
4585} // namespace art