/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_mips64.h"

#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_mips64.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/mips64/assembler_mips64.h"
#include "utils/stack_checks.h"

namespace art {
namespace mips64 {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr GpuRegister kMethodRegisterArgument = A0;

Location Mips64ReturnLocation(Primitive::Type return_type) {
  switch (return_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      return Location::RegisterLocation(V0);

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      return Location::FpuRegisterLocation(F0);

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
}
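
// A quick sketch of the mapping above: a `long` result comes back in the full
// 64-bit V0 (no register pairs are needed on MIPS64), `float`/`double` results
// come back in F0, and `void` yields an empty Location.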

Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(Primitive::Type type) const {
  return Mips64ReturnLocation(type);
}

Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}

Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(Primitive::Type type) {
  Location next_location;
  if (type == Primitive::kPrimVoid) {
    LOG(FATAL) << "Unexpected parameter type " << type;
  }

  if (Primitive::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    next_location = Location::FpuRegisterLocation(
        calling_convention.GetFpuRegisterAt(float_index_++));
    gp_index_++;
  } else if (!Primitive::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
    float_index_++;
  } else {
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                 : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;

  return next_location;
}
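
// Note that `gp_index_` and `float_index_` advance in lockstep above: an FP
// argument also burns a GP slot and vice versa, mirroring a MIPS N64-style
// convention where integer and FP argument registers are allocated in
// parallel. A sketch: for a signature (int, float, long), the int takes GP
// slot 0, the float takes FP slot 1 (slot 0 was consumed by the int), and the
// long takes GP slot 2; stack space is reserved for all three regardless.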

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
  return Mips64ReturnLocation(type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()

class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimInt,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
};

class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
};

class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  LoadClassSlowPathMIPS64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCodeMIPS64(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      mips64_codegen->MoveLocation(out,
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   type);
    }

    RestoreLiveRegisters(codegen, locations);
    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
      DCHECK(out.IsValid());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info =
          mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
      mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info, AT);
      __ Sw(out.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
    }
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex pc of `at` (the instruction this slow path is for).
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};

class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit LoadStringSlowPathMIPS64(HLoadString* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HLoadString* load = instruction_->AsLoadString();
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
    Primitive::Type type = instruction_->GetType();
    mips64_codegen->MoveLocation(locations->Out(),
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);

    RestoreLiveRegisters(codegen, locations);

    // Store the resolved String to the BSS entry.
    GpuRegister out = locations->Out().AsRegister<GpuRegister>();
    CodeGeneratorMIPS64::PcRelativePatchInfo* info =
        mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
    mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info, AT);
    __ Sw(out, AT, /* placeholder */ 0x5678);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
};

class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
};

class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ Bc(GetReturnLabel());
    } else {
      __ Bc(mips64_codegen->GetLabelOf(successor_));
    }
  }

  Mips64Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Mips64Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
};

class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ Bc(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};

class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, void>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
};

class ArraySetSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit ArraySetSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS64);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// If `entrypoint` is a valid location it is assumed to already be
// holding the entrypoint. The case where the entrypoint is passed in
// is for the GcRoot read barrier.
class ReadBarrierMarkSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierMarkSlowPathMIPS64(HInstruction* instruction,
                                Location ref,
                                Location entrypoint = Location::NoLocation())
      : SlowPathCodeMIPS64(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

471 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == S8)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      mips64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      DCHECK_EQ(entrypoint_.AsRegister<GpuRegister>(), T9);
      __ Jalr(entrypoint_.AsRegister<GpuRegister>());
      __ Nop();
    } else {
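      // The per-register marking entrypoints are assumed to be laid out as a
      // table on the Thread; `ref_reg - 1` is the table index (register ZERO
      // can never hold a reference, so it presumably has no entry).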
      int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
      // This runtime call does not require a stack map.
      mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                          instruction_,
                                                          this);
    }
    __ Bc(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if already loaded.
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS64);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathMIPS64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(HInstruction* instruction,
                                              Location ref,
                                              GpuRegister obj,
                                              Location field_offset,
                                              GpuRegister temp1)
      : SlowPathCodeMIPS64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == S8)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    Mips64Label done;
    __ Beqc(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates it before us, but that's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    GpuRegister base = obj_;
    GpuRegister offset = field_offset_.AsRegister<GpuRegister>();
    GpuRegister expected = temp1_;
    GpuRegister value = ref_reg;
    GpuRegister tmp_ptr = TMP;  // Pointer to actual memory.
    GpuRegister tmp = AT;  // Value in memory.

    __ Daddu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    Mips64Label loop_head, exit_loop;
    __ Bind(&loop_head);
    __ Ll(tmp, tmp_ptr);
    // The LL instruction sign-extends the 32-bit value, but
    // 32-bit references must be zero-extended. Zero-extend `tmp`.
    __ Dext(tmp, tmp, 0, 32);
    __ Bnec(tmp, expected, &exit_loop);
    __ Move(tmp, value);
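    // Sc writes 1 to `tmp` on success and 0 on failure (lost LL/SC link),
    // so a zero result loops back to retry the whole LL/SC sequence.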
    __ Sc(tmp, tmp_ptr);
    __ Beqzc(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ Bc(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const GpuRegister obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  const GpuRegister temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS64);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierForHeapReferenceSlowPathMIPS64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCodeMIPS64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        GpuRegister index_reg = index_.AsRegister<GpuRegister>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips64::Mips64Assembler::Sll and
          // art::mips64::Mips64Assembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          GpuRegister free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
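        // At this point `index` holds the element's byte offset:
        // data_offset + (original_index << 2), i.e. 4 bytes per 32-bit heap
        // reference.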
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathMIPS64";
  }

 private:
  GpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<GpuRegister>());
    size_t obj = static_cast<int>(obj_.AsRegister<GpuRegister>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<GpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS64);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierForRootSlowPathMIPS64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Primitive::Type type = Primitive::kPrimNot;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 root_,
                                 Primitive::kPrimNot);
    mips64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS64"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS64);
};

CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const Mips64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena()),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(StringReferenceValueComparator(),
                                 graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(TypeReferenceValueComparator(),
                               graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<Mips64Assembler*>(GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()

void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position =
        stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips64);
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    DCHECK_GE(new_position, old_position);
    stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  CodeGenerator::Finalize(allocator);
}

Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}

void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
}

void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
}

void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
  // Pop reg
  __ Ld(GpuRegister(reg), SP, 0);
  __ DecreaseFrameSize(kMips64DoublewordSize);
}

void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
  // Push reg
  __ IncreaseFrameSize(kMips64DoublewordSize);
  __ Sd(GpuRegister(reg), SP, 0);
}

void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
  LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
  StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
  __ LoadFromOffset(load_type,
                    GpuRegister(ensure_scratch.GetRegister()),
                    SP,
                    index1 + stack_offset);
  __ LoadFromOffset(load_type,
                    TMP,
                    SP,
                    index2 + stack_offset);
  __ StoreToOffset(store_type,
                   GpuRegister(ensure_scratch.GetRegister()),
                   SP,
                   index2 + stack_offset);
  __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
}
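
// Net effect of Exchange() (a sketch): the stack slots at `index1` and
// `index2` are swapped via TMP plus one extra scratch GPR. If V0 had to be
// spilled to provide that scratch, the frame temporarily grows by one
// doubleword, which is why both offsets are biased by `stack_offset`.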

static dwarf::Reg DWARFReg(GpuRegister reg) {
  return dwarf::Reg::Mips64Core(static_cast<int>(reg));
}

static dwarf::Reg DWARFReg(FpuRegister reg) {
  return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
}

void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips64) || !IsLeafMethod();

  if (do_overflow_check) {
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(kMips64)) {
    LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips64) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreFpuToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
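
// A sketch of the frame built above (offsets relative to the new SP), assuming
// all callee-saves are allocated:
//
//   GetFrameSize() - 8  : highest-numbered core callee-save (RA)
//   ...                 : remaining core, then FPU callee-saves
//   SP + 0              : ArtMethod* (kCurrentMethodStackOffset)
//
// GenerateFrameExit() below walks the same offsets when reloading.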

void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFpuFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    __ DecreaseFrameSize(GetFrameSize());
  }

  __ Jic(RA, 0);

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}

void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

void CodeGeneratorMIPS64::MoveLocation(Location destination,
                                       Location source,
                                       Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves.
  bool unspecified_type = (dst_type == Primitive::kPrimVoid);
  DCHECK_EQ(unspecified_type, false);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? Primitive::kPrimInt : Primitive::kPrimFloat;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? Primitive::kPrimLong : Primitive::kPrimDouble;
      }
    }
    DCHECK((destination.IsFpuRegister() && Primitive::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !Primitive::IsFloatingPointType(dst_type)));
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      // Move to GPR/FPR from stack
      LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
      if (Primitive::IsFloatingPointType(dst_type)) {
        __ LoadFpuFromOffset(load_type,
                             destination.AsFpuRegister<FpuRegister>(),
                             SP,
                             source.GetStackIndex());
      } else {
        // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
        __ LoadFromOffset(load_type,
                          destination.AsRegister<GpuRegister>(),
                          SP,
                          source.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to GPR/FPR from constant
      GpuRegister gpr = AT;
      if (!Primitive::IsFloatingPointType(dst_type)) {
        gpr = destination.AsRegister<GpuRegister>();
      }
      if (dst_type == Primitive::kPrimInt || dst_type == Primitive::kPrimFloat) {
        int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst32(gpr, value);
        }
      } else {
        int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
        if (Primitive::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst64(gpr, value);
        }
      }
      if (dst_type == Primitive::kPrimFloat) {
        __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      } else if (dst_type == Primitive::kPrimDouble) {
        __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      }
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        // Move to GPR from GPR
        __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
      } else {
        DCHECK(destination.IsFpuRegister());
        if (Primitive::Is64BitType(dst_type)) {
          __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        } else {
          __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        }
      }
    } else if (source.IsFpuRegister()) {
      if (destination.IsFpuRegister()) {
        // Move to FPR from FPR
        if (dst_type == Primitive::kPrimFloat) {
          __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          DCHECK_EQ(dst_type, Primitive::kPrimDouble);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001251 __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1252 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001253 } else {
1254 DCHECK(destination.IsRegister());
1255 if (Primitive::Is64BitType(dst_type)) {
1256 __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1257 } else {
1258 __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1259 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001260 }
1261 }
1262 } else { // The destination is not a register. It must be a stack slot.
1263 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1264 if (source.IsRegister() || source.IsFpuRegister()) {
1265 if (unspecified_type) {
1266 if (source.IsRegister()) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001267 dst_type = destination.IsStackSlot() ? Primitive::kPrimInt : Primitive::kPrimLong;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001268 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001269 dst_type = destination.IsStackSlot() ? Primitive::kPrimFloat : Primitive::kPrimDouble;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001270 }
1271 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001272 DCHECK((destination.IsDoubleStackSlot() == Primitive::Is64BitType(dst_type)) &&
1273 (source.IsFpuRegister() == Primitive::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001274 // Move to stack from GPR/FPR
1275 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
1276 if (source.IsRegister()) {
1277 __ StoreToOffset(store_type,
1278 source.AsRegister<GpuRegister>(),
1279 SP,
1280 destination.GetStackIndex());
1281 } else {
1282 __ StoreFpuToOffset(store_type,
1283 source.AsFpuRegister<FpuRegister>(),
1284 SP,
1285 destination.GetStackIndex());
1286 }
1287 } else if (source.IsConstant()) {
1288 // Move to stack from constant
1289 HConstant* src_cst = source.GetConstant();
1290 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001291 GpuRegister gpr = ZERO;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001292 if (destination.IsStackSlot()) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001293 int32_t value = GetInt32ValueOf(src_cst->AsConstant());
1294 if (value != 0) {
1295 gpr = TMP;
1296 __ LoadConst32(gpr, value);
1297 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001298 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001299 DCHECK(destination.IsDoubleStackSlot());
1300 int64_t value = GetInt64ValueOf(src_cst->AsConstant());
1301 if (value != 0) {
1302 gpr = TMP;
1303 __ LoadConst64(gpr, value);
1304 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001305 }
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001306 __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001307 } else {
1308 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
1309 DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
1310 // Move to stack from stack
1311 if (destination.IsStackSlot()) {
1312 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1313 __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
1314 } else {
1315 __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
1316 __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
1317 }
1318 }
1319 }
1320}
1321
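// Swaps two same-typed locations. Register pairs go through TMP/FTMP, a
// register<->stack-slot swap uses a load/store pair, and stack<->stack swaps
// are delegated to the parallel move resolver's Exchange().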
void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, Primitive::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs
    FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
    FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, r1);
      __ MovS(r1, r2);
      __ MovS(r2, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, r1);
      __ MovD(r1, r2);
      __ MovD(r2, FTMP);
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == Primitive::kPrimNot.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}

void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ LoadConst32(location.AsRegister<GpuRegister>(), value);
}

void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
  if (location.IsRegister()) {
    locations->AddTemp(location);
  } else {
    UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
  }
}

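// Marks the GC card for `object` so the garbage collector notices the
// potential cross-generation reference. The emitted sequence is:
//   card <- [TR + card_table_offset]        (biased card table base)
//   temp <- object >> kCardShift
//   sb card, 0(card + temp)                 (low byte of the base is the dirty value)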
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    __ Beqzc(value, &done);
  }
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}

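// Converts the recorded PC-relative patch infos into LinkerPatch entries; the
// bound pc_rel_label of each info yields the code offset the linker patches.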
template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.pc_rel_label.IsBound());
    uint32_t pc_rel_offset = __ GetLabelLocation(&info.pc_rel_label);
    linker_patches->push_back(Factory(pc_rel_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}

void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_dex_cache_patches_.size() +
      pc_relative_string_patches_.size() +
      pc_relative_type_patches_.size() +
      type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      boot_image_type_patches_.size();
  linker_patches->reserve(size);
  EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
                                                               linker_patches);
  if (!GetCompilerOptions().IsBootImage()) {
    DCHECK(pc_relative_type_patches_.empty());
    EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  } else {
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
                                                                linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
                                                              linker_patches);
  for (const auto& entry : boot_image_string_patches_) {
    const StringReference& target_string = entry.first;
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::StringPatch(literal_offset,
                                                       target_string.dex_file,
                                                       target_string.string_index.index_));
  }
  for (const auto& entry : boot_image_type_patches_) {
    const TypeReference& target_type = entry.first;
    Literal* literal = entry.second;
    DCHECK(literal->GetLabel()->IsBound());
    uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
    linker_patches->push_back(LinkerPatch::TypePatch(literal_offset,
                                                     target_type.dex_file,
                                                     target_type.type_index.index_));
  }
  DCHECK_EQ(size, linker_patches->size());
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStringPatch(
    const DexFile& dex_file, dex::StringIndex string_index) {
  return NewPcRelativePatch(dex_file, string_index.index_, &pc_relative_string_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeTypePatch(
    const DexFile& dex_file, dex::TypeIndex type_index) {
  return NewPcRelativePatch(dex_file, type_index.index_, &pc_relative_type_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
    const DexFile& dex_file, dex::TypeIndex type_index) {
  return NewPcRelativePatch(dex_file, type_index.index_, &type_bss_entry_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeDexCacheArrayPatch(
    const DexFile& dex_file, uint32_t element_offset) {
  return NewPcRelativePatch(dex_file, element_offset, &pc_relative_dex_cache_patches_);
}

CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
    const DexFile& dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
  patches->emplace_back(dex_file, offset_or_index);
  return &patches->back();
}

Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint32_t>(value); });
}

Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
  return uint64_literals_.GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint64_t>(value); });
}

Literal* CodeGeneratorMIPS64::DeduplicateMethodLiteral(MethodReference target_method,
                                                       MethodToLiteralMap* map) {
  return map->GetOrCreate(
      target_method,
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

Literal* CodeGeneratorMIPS64::DeduplicateBootImageStringLiteral(const DexFile& dex_file,
                                                                dex::StringIndex string_index) {
  return boot_image_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

Literal* CodeGeneratorMIPS64::DeduplicateBootImageTypeLiteral(const DexFile& dex_file,
                                                              dex::TypeIndex type_index) {
  return boot_image_type_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
  return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
}

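// Emits the `auipc` half of a PC-relative address computation and binds the
// patch label to it; the linker later replaces the 0x1234 placeholder with
// the high half of the real offset. The caller emits the instruction that
// consumes the low half.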
void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info,
                                                               GpuRegister out) {
  __ Bind(&info->pc_rel_label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // The immediately following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
}

Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
                                                          dex::StringIndex string_index,
                                                          Handle<mirror::String> handle) {
  jit_string_roots_.Overwrite(StringReference(&dex_file, string_index),
                              reinterpret_cast64<uint64_t>(handle.GetReference()));
  return jit_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

Literal* CodeGeneratorMIPS64::DeduplicateJitClassLiteral(const DexFile& dex_file,
                                                         dex::TypeIndex type_index,
                                                         Handle<mirror::Class> handle) {
  jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
                             reinterpret_cast64<uint64_t>(handle.GetReference()));
  return jit_class_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}

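// Patches a 32-bit literal in JIT-compiled code so that it holds the address
// of entry `index_in_table` in the GC roots table accompanying the method.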
void CodeGeneratorMIPS64::PatchJitRootUse(uint8_t* code,
                                          const uint8_t* roots_data,
                                          const Literal* literal,
                                          uint64_t index_in_table) const {
  uint32_t literal_offset = GetAssembler().GetLabelLocation(literal->GetLabel());
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  reinterpret_cast<uint32_t*>(code + literal_offset)[0] = dchecked_integral_cast<uint32_t>(address);
}

void CodeGeneratorMIPS64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
  for (const auto& entry : jit_string_patches_) {
    const auto& it = jit_string_roots_.find(entry.first);
    DCHECK(it != jit_string_roots_.end());
    PatchJitRootUse(code, roots_data, entry.second, it->second);
  }
  for (const auto& entry : jit_class_patches_) {
    const auto& it = jit_class_roots_.find(entry.first);
    DCHECK(it != jit_class_roots_.end());
    PatchJitRootUse(code, roots_data, entry.second, it->second);
  }
}

void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
  // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
  blocked_core_registers_[ZERO] = true;
  blocked_core_registers_[K0] = true;
  blocked_core_registers_[K1] = true;
  blocked_core_registers_[GP] = true;
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[RA] = true;

  // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
  // registers (similar to how AT is used by MIPS assemblers).
  blocked_core_registers_[AT] = true;
  blocked_core_registers_[TMP] = true;
  blocked_core_registers_[TMP2] = true;
  blocked_fpu_registers_[FTMP] = true;

  // Reserve suspend and thread registers.
  blocked_core_registers_[S0] = true;
  blocked_core_registers_[TR] = true;

  // Reserve T9 for function calls.
  blocked_core_registers_[T9] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}

size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreFpuToOffset(kStoreDoubleword, FpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFpuFromOffset(kLoadDoubleword, FpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}

void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << GpuRegister(reg);
}

void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FpuRegister(reg);
}

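// Runtime calls load the entrypoint from the Thread object and call it
// through T9 (the standard MIPS PIC call register), recording a stack map
// afterwards when EntrypointRequiresStackMap() says one is needed.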
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}

void CodeGeneratorMIPS64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}

void CodeGeneratorMIPS64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
  __ Jalr(T9);
  __ Nop();
}

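// Compares the class status word against kStatusInitialized and takes the
// slow path if it is smaller. The sync(0) acts as the acquire barrier needed
// to observe the initializing thread's writes once the flag is seen set.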
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  __ Bltc(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}

void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // Only stype 0 is supported.
}

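// Polls the thread flags halfword; any set flag diverts to the suspend-check
// slow path. When emitted on a back edge (successor != nullptr), the fast
// path branches straight to the successor block instead.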
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS64(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}

InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

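// Shared location setup for add/sub/and/or/xor. A constant RHS stays an
// immediate only when it fits the MIPS encoding: logical ops take a 16-bit
// zero-extended immediate (andi/ori/xori), add takes a 16-bit sign-extended
// one (addiu/daddiu), and sub is encoded as an add of the negated immediate.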
void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DCHECK_EQ(instruction->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Primitive::Type type = instruction->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      HInstruction* right = instruction->InputAt(1);
      bool can_use_imm = false;
      if (right->IsConstant()) {
        int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
        if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
          can_use_imm = IsUint<16>(imm);
        } else if (instruction->IsAdd()) {
          can_use_imm = IsInt<16>(imm);
        } else {
          DCHECK(instruction->IsSub());
          can_use_imm = IsInt<16>(-imm);
        }
      }
      if (can_use_imm)
        locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
      else
        locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
  }
}

void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, rhs_imm);
          else
            __ Addu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, rhs_imm);
          else
            __ Daddu(dst, lhs, rhs_reg);
        }
      } else {
        DCHECK(instruction->IsSub());
        if (type == Primitive::kPrimInt) {
          if (use_imm)
            __ Addiu(dst, lhs, -rhs_imm);
          else
            __ Subu(dst, lhs, rhs_reg);
        } else {
          if (use_imm)
            __ Daddiu(dst, lhs, -rhs_imm);
          else
            __ Dsubu(dst, lhs, rhs_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == Primitive::kPrimFloat)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == Primitive::kPrimFloat)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}

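// Shared handling for shl/shr/ushr/ror on int and long. Constant shift
// distances are masked to 5 or 6 bits (Java semantics); 64-bit distances of
// 32 or more use the dedicated +32 opcodes (dsll32/dsra32/dsrl32/drotr32)
// because the 5-bit immediate field cannot encode them directly.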
void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
  Primitive::Type type = instr->GetResultType();
  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}

void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  Primitive::Type type = instr->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        uint32_t shift_value = rhs_imm &
            (type == Primitive::kPrimInt ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == Primitive::kPrimInt) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        if (type == Primitive::kPrimInt) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}

void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}

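// ArrayGet locations: the index may be a constant (folded into the load
// offset in codegen) or a register (combined with the base via Daddu/Dlsa).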
void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier
                                                       ? LocationSummary::kCallOnSlowPath
                                                       : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->AddTemp(Location::RequiresRegister());
  }
}

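// Returns a lambda that records an implicit null check for `instruction`.
// The load/store emitters accept it as a callback and invoke it once the
// instruction that may fault on a null base has been emitted.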
static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS64* codegen) {
  auto null_checker = [codegen, instruction]() {
    codegen->MaybeRecordImplicitNullCheck(instruction);
  };
  return null_checker;
}

void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  Location index = locations->InAt(1);
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  Primitive::Type type = instruction->GetType();
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  switch (type) {
    case Primitive::kPrimBoolean: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimByte: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimShort: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_2);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimChar: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (maybe_compressed_char_at) {
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
        __ Dext(TMP, TMP, 0, 1);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
      }
      if (index.IsConstant()) {
        int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
        if (maybe_compressed_char_at) {
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedByte,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_1));
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2));
          __ Bind(&done);
        } else {
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2),
                            null_checker);
        }
      } else {
        GpuRegister index_reg = index.AsRegister<GpuRegister>();
        if (maybe_compressed_char_at) {
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ Daddu(TMP, obj, index_reg);
          __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
          __ Bind(&done);
        } else {
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
        }
      }
      break;
    }

    case Primitive::kPrimInt: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      LoadOperandType load_type = (type == Primitive::kPrimNot) ? kLoadUnsignedWord : kLoadWord;
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(load_type, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFromOffset(load_type, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimNot: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        Location temp = locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier call.
        codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
                                                        out_loc,
                                                        obj,
                                                        data_offset,
                                                        index,
                                                        temp,
                                                        /* needs_null_check */ true);
      } else {
        GpuRegister out = out_loc.AsRegister<GpuRegister>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ LoadFromOffset(kLoadUnsignedWord, out, obj, offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          __ LoadFromOffset(kLoadUnsignedWord, out, TMP, data_offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction,
                                                 out_loc,
                                                 out_loc,
                                                 obj_loc,
                                                 data_offset,
                                                 index);
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFpuFromOffset(kLoadWord, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}

void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // Mask out compression flag from String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Srl(out, out, 1u);
  }
}

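// Store-location helpers: a zero bit pattern of any type can be stored
// directly from the ZERO register, so such constants are never materialized.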
Location LocationsBuilderMIPS64::RegisterOrZeroConstant(HInstruction* instruction) {
  return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
      ? Location::ConstantLocation(instruction->AsConstant())
      : Location::RequiresRegister();
}

Location LocationsBuilderMIPS64::FpuRegisterOrConstantForStore(HInstruction* instruction) {
  // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
  // We can store a non-zero float or double constant without first loading it into the FPU,
  // but we should only prefer this if the constant has a single use.
  if (instruction->IsConstant() &&
      (instruction->AsConstant()->IsZeroBitPattern() ||
       instruction->GetUses().HasExactlyOneElement())) {
    return Location::ConstantLocation(instruction->AsConstant());
  }
  // Otherwise fall through and require an FPU register for the constant.
  return Location::RequiresFpuRegister();
}

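// ArraySet needs a slow-path type check when storing into a reference array
// whose component type cannot be proven at compile time, plus a write barrier
// (the temp reserved below) for non-null reference values.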
void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
    locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
  } else {
    locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
  }
  if (needs_write_barrier) {
    // Temporary register for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
  }
}

void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  GpuRegister base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        __ Daddu(base_reg, obj, index.AsRegister<GpuRegister>());
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_2);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimInt: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case Primitive::kPrimNot: {
      if (value_location.IsConstant()) {
        // Just setting null.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      GpuRegister value = value_location.AsRegister<GpuRegister>();
      GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
      GpuRegister temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      Mips64Label done;
      SlowPathCodeMIPS64* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          Mips64Label non_zero;
          __ Bnezc(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002391 }
Alexey Frunze15958152017-02-09 19:08:30 -08002392 __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
2393 __ Bc(&done);
2394 __ Bind(&non_zero);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002395 }
Alexey Frunze15958152017-02-09 19:08:30 -08002396
2397 // Note that when read barriers are enabled, the type checks
2398 // are performed without read barriers. This is fine, even in
2399 // the case where a class object is in the from-space after
2400 // the flip, as a comparison involving such a type would not
2401 // produce a false positive; it may of course produce a false
2402 // negative, in which case we would take the ArraySet slow
2403 // path.
2404
2405 // /* HeapReference<Class> */ temp1 = obj->klass_
2406 __ LoadFromOffset(kLoadUnsignedWord, temp1, obj, class_offset, null_checker);
2407 __ MaybeUnpoisonHeapReference(temp1);
2408
2409 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2410 __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, component_offset);
2411 // /* HeapReference<Class> */ temp2 = value->klass_
2412 __ LoadFromOffset(kLoadUnsignedWord, temp2, value, class_offset);
2413        // If heap poisoning is enabled, there is no need to unpoison `temp1`
2414        // or `temp2`, as we are comparing two poisoned references.
2415
2416 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2417 Mips64Label do_put;
2418 __ Beqc(temp1, temp2, &do_put);
2419 // If heap poisoning is enabled, the `temp1` reference has
2420 // not been unpoisoned yet; unpoison it now.
2421 __ MaybeUnpoisonHeapReference(temp1);
2422
2423 // /* HeapReference<Class> */ temp1 = temp1->super_class_
2424 __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, super_offset);
2425 // If heap poisoning is enabled, no need to unpoison
2426 // `temp1`, as we are comparing against null below.
2427 __ Bnezc(temp1, slow_path->GetEntryLabel());
2428 __ Bind(&do_put);
2429 } else {
2430 __ Bnec(temp1, temp2, slow_path->GetEntryLabel());
2431 }
2432 }
2433
2434 GpuRegister source = value;
2435 if (kPoisonHeapReferences) {
2436 // Note that in the case where `value` is a null reference,
2437 // we do not enter this block, as a null reference does not
2438 // need poisoning.
2439 __ Move(temp1, value);
2440 __ PoisonHeapReference(temp1);
2441 source = temp1;
2442 }
2443
2444 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2445 if (index.IsConstant()) {
2446 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002447 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002448 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Alexey Frunze15958152017-02-09 19:08:30 -08002449 }
2450 __ StoreToOffset(kStoreWord, source, base_reg, data_offset);
2451
2452 if (!may_need_runtime_call_for_type_check) {
2453 codegen_->MaybeRecordImplicitNullCheck(instruction);
2454 }
2455
2456 codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());
2457
2458 if (done.IsLinked()) {
2459 __ Bind(&done);
2460 }
2461
2462 if (slow_path != nullptr) {
2463 __ Bind(slow_path->GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002464 }
2465 break;
2466 }
2467
2468 case Primitive::kPrimLong: {
2469 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002470 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002471 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002472 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002473 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002474 }
2475 if (value_location.IsConstant()) {
2476 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
2477 __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
2478 } else {
2479 GpuRegister value = value_location.AsRegister<GpuRegister>();
2480 __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002481 }
2482 break;
2483 }
2484
2485 case Primitive::kPrimFloat: {
2486 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002487 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002488 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002489 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002490 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002491 }
2492 if (value_location.IsConstant()) {
2493 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2494 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2495 } else {
2496 FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
2497 __ StoreFpuToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002498 }
2499 break;
2500 }
2501
2502 case Primitive::kPrimDouble: {
2503 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002504 if (index.IsConstant()) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002505 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002506 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002507 __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002508 }
2509 if (value_location.IsConstant()) {
2510 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
2511 __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
2512 } else {
2513 FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
2514 __ StoreFpuToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002515 }
2516 break;
2517 }
2518
2519 case Primitive::kPrimVoid:
2520 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2521 UNREACHABLE();
2522 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002523}
2524
2525void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002526 RegisterSet caller_saves = RegisterSet::Empty();
2527 InvokeRuntimeCallingConvention calling_convention;
2528 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2529 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2530 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002531 locations->SetInAt(0, Location::RequiresRegister());
2532 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002533}
2534
2535void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
2536 LocationSummary* locations = instruction->GetLocations();
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002537 BoundsCheckSlowPathMIPS64* slow_path =
2538 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002539 codegen_->AddSlowPath(slow_path);
2540
2541 GpuRegister index = locations->InAt(0).AsRegister<GpuRegister>();
2542 GpuRegister length = locations->InAt(1).AsRegister<GpuRegister>();
2543
2544  // The array length is limited to the maximum positive signed 32-bit integer, so
2545  // an unsigned comparison of index and length checks for index < 0
2546  // and for length <= index simultaneously.
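  // Example sketch: with length == 10, index == -1 compares as a huge unsigned
  // value, so index >= length and we branch to the slow path; index == 10 branches
  // too, while index == 9 falls through.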
Alexey Frunzea0e87b02015-09-24 22:57:20 -07002547 __ Bgeuc(index, length, slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002548}
2549
Alexey Frunze15958152017-02-09 19:08:30 -08002550// Temp is used for read barrier.
2551static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
2552 if (kEmitCompilerReadBarrier &&
2553 (kUseBakerReadBarrier ||
2554 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
2555 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
2556 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
2557 return 1;
2558 }
2559 return 0;
2560}
2561
2562// Extra temp is used for read barrier.
2563static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
2564 return 1 + NumberOfInstanceOfTemps(type_check_kind);
2565}
2566
Alexey Frunze4dda3372015-06-01 18:31:49 -07002567void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002568 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
2569 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
2570
2571 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
2572 switch (type_check_kind) {
2573 case TypeCheckKind::kExactCheck:
2574 case TypeCheckKind::kAbstractClassCheck:
2575 case TypeCheckKind::kClassHierarchyCheck:
2576 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08002577 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002578 ? LocationSummary::kCallOnSlowPath
2579 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
2580 break;
2581 case TypeCheckKind::kArrayCheck:
2582 case TypeCheckKind::kUnresolvedCheck:
2583 case TypeCheckKind::kInterfaceCheck:
2584 call_kind = LocationSummary::kCallOnSlowPath;
2585 break;
2586 }
2587
2588 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002589 locations->SetInAt(0, Location::RequiresRegister());
2590 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08002591 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002592}
2593
2594void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002595 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002596 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002597 Location obj_loc = locations->InAt(0);
2598 GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002599 GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08002600 Location temp_loc = locations->GetTemp(0);
2601 GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
2602 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
2603 DCHECK_LE(num_temps, 2u);
2604 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002605 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2606 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2607 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2608 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
2609 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
2610 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
2611 const uint32_t object_array_data_offset =
2612 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
2613 Mips64Label done;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002614
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002615  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
2616  // caused by false negatives. The false negatives come from avoiding read barriers below, which
2617  // is done for performance and code size reasons.
2618 bool is_type_check_slow_path_fatal = false;
2619 if (!kEmitCompilerReadBarrier) {
2620 is_type_check_slow_path_fatal =
2621 (type_check_kind == TypeCheckKind::kExactCheck ||
2622 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
2623 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
2624 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
2625 !instruction->CanThrowIntoCatchBlock();
2626 }
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002627 SlowPathCodeMIPS64* slow_path =
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002628 new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
2629 is_type_check_slow_path_fatal);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002630 codegen_->AddSlowPath(slow_path);
2631
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002632 // Avoid this check if we know `obj` is not null.
2633 if (instruction->MustDoNullCheck()) {
2634 __ Beqzc(obj, &done);
2635 }
2636
2637 switch (type_check_kind) {
2638 case TypeCheckKind::kExactCheck:
2639 case TypeCheckKind::kArrayCheck: {
2640 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08002641 GenerateReferenceLoadTwoRegisters(instruction,
2642 temp_loc,
2643 obj_loc,
2644 class_offset,
2645 maybe_temp2_loc,
2646 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002647 // Jump to slow path for throwing the exception or doing a
2648 // more involved array check.
2649 __ Bnec(temp, cls, slow_path->GetEntryLabel());
2650 break;
2651 }
2652
2653 case TypeCheckKind::kAbstractClassCheck: {
2654 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08002655 GenerateReferenceLoadTwoRegisters(instruction,
2656 temp_loc,
2657 obj_loc,
2658 class_offset,
2659 maybe_temp2_loc,
2660 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002661 // If the class is abstract, we eagerly fetch the super class of the
2662 // object to avoid doing a comparison we know will fail.
2663 Mips64Label loop;
2664 __ Bind(&loop);
2665 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08002666 GenerateReferenceLoadOneRegister(instruction,
2667 temp_loc,
2668 super_offset,
2669 maybe_temp2_loc,
2670 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002671 // If the class reference currently in `temp` is null, jump to the slow path to throw the
2672 // exception.
2673 __ Beqzc(temp, slow_path->GetEntryLabel());
2674 // Otherwise, compare the classes.
2675 __ Bnec(temp, cls, &loop);
2676 break;
2677 }
2678
2679 case TypeCheckKind::kClassHierarchyCheck: {
2680 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08002681 GenerateReferenceLoadTwoRegisters(instruction,
2682 temp_loc,
2683 obj_loc,
2684 class_offset,
2685 maybe_temp2_loc,
2686 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002687 // Walk over the class hierarchy to find a match.
2688 Mips64Label loop;
2689 __ Bind(&loop);
2690 __ Beqc(temp, cls, &done);
2691 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08002692 GenerateReferenceLoadOneRegister(instruction,
2693 temp_loc,
2694 super_offset,
2695 maybe_temp2_loc,
2696 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002697 // If the class reference currently in `temp` is null, jump to the slow path to throw the
2698 // exception. Otherwise, jump to the beginning of the loop.
2699 __ Bnezc(temp, &loop);
2700 __ Bc(slow_path->GetEntryLabel());
2701 break;
2702 }
2703
2704 case TypeCheckKind::kArrayObjectCheck: {
2705 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08002706 GenerateReferenceLoadTwoRegisters(instruction,
2707 temp_loc,
2708 obj_loc,
2709 class_offset,
2710 maybe_temp2_loc,
2711 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002712 // Do an exact check.
2713 __ Beqc(temp, cls, &done);
2714 // Otherwise, we need to check that the object's class is a non-primitive array.
2715 // /* HeapReference<Class> */ temp = temp->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08002716 GenerateReferenceLoadOneRegister(instruction,
2717 temp_loc,
2718 component_offset,
2719 maybe_temp2_loc,
2720 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002721 // If the component type is null, jump to the slow path to throw the exception.
2722 __ Beqzc(temp, slow_path->GetEntryLabel());
2723      // Otherwise, the object is indeed an array; further check that its component
2724      // type is not a primitive type.
2725 __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
2726 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
2727 __ Bnezc(temp, slow_path->GetEntryLabel());
2728 break;
2729 }
2730
2731 case TypeCheckKind::kUnresolvedCheck:
2732 // We always go into the type check slow path for the unresolved check case.
2733 // We cannot directly call the CheckCast runtime entry point
2734 // without resorting to a type checking slow path here (i.e. by
2735      // calling InvokeRuntime directly), as it would require
2736      // assigning fixed registers for the inputs of this HCheckCast
2737      // instruction (following the runtime calling convention), which
2738 // might be cluttered by the potential first read barrier
2739 // emission at the beginning of this method.
2740 __ Bc(slow_path->GetEntryLabel());
2741 break;
2742
2743 case TypeCheckKind::kInterfaceCheck: {
2744 // Avoid read barriers to improve performance of the fast path. We can not get false
2745 // positives by doing this.
2746 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08002747 GenerateReferenceLoadTwoRegisters(instruction,
2748 temp_loc,
2749 obj_loc,
2750 class_offset,
2751 maybe_temp2_loc,
2752 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002753 // /* HeapReference<Class> */ temp = temp->iftable_
Alexey Frunze15958152017-02-09 19:08:30 -08002754 GenerateReferenceLoadTwoRegisters(instruction,
2755 temp_loc,
2756 temp_loc,
2757 iftable_offset,
2758 maybe_temp2_loc,
2759 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002760 // Iftable is never null.
2761 __ Lw(TMP, temp, array_length_offset);
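      // Assuming ART's IfTable layout, entries are (interface class, method array)
      // pairs, so each entry spans two heap references and TMP holds the length in
      // references; hence the loop advances by 2 * kHeapReferenceSize and decrements
      // TMP by 2.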
2762 // Loop through the iftable and check if any class matches.
2763 Mips64Label loop;
2764 __ Bind(&loop);
2765 __ Beqzc(TMP, slow_path->GetEntryLabel());
2766 __ Lwu(AT, temp, object_array_data_offset);
2767 __ MaybeUnpoisonHeapReference(AT);
2768 // Go to next interface.
2769 __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
2770 __ Addiu(TMP, TMP, -2);
2771 // Compare the classes and continue the loop if they do not match.
2772 __ Bnec(AT, cls, &loop);
2773 break;
2774 }
2775 }
2776
2777 __ Bind(&done);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002778 __ Bind(slow_path->GetExitLabel());
2779}
2780
2781void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
2782 LocationSummary* locations =
2783 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2784 locations->SetInAt(0, Location::RequiresRegister());
2785 if (check->HasUses()) {
2786 locations->SetOut(Location::SameAsFirstInput());
2787 }
2788}
2789
2790void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
2791 // We assume the class is not null.
2792 SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
2793 check->GetLoadClass(),
2794 check,
2795 check->GetDexPc(),
2796 true);
2797 codegen_->AddSlowPath(slow_path);
2798 GenerateClassInitializationCheck(slow_path,
2799 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
2800}
2801
2802void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
2803 Primitive::Type in_type = compare->InputAt(0)->GetType();
2804
Alexey Frunze299a9392015-12-08 16:08:02 -08002805 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002806
2807 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002808 case Primitive::kPrimBoolean:
2809 case Primitive::kPrimByte:
2810 case Primitive::kPrimShort:
2811 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002812 case Primitive::kPrimInt:
Alexey Frunze4dda3372015-06-01 18:31:49 -07002813 case Primitive::kPrimLong:
2814 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002815 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002816 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2817 break;
2818
2819 case Primitive::kPrimFloat:
Alexey Frunze299a9392015-12-08 16:08:02 -08002820 case Primitive::kPrimDouble:
2821 locations->SetInAt(0, Location::RequiresFpuRegister());
2822 locations->SetInAt(1, Location::RequiresFpuRegister());
2823 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002824 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002825
2826 default:
2827 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
2828 }
2829}
2830
2831void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
2832 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08002833 GpuRegister res = locations->Out().AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002834 Primitive::Type in_type = instruction->InputAt(0)->GetType();
2835
2836 // 0 if: left == right
2837 // 1 if: left > right
2838 // -1 if: left < right
2839 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002840 case Primitive::kPrimBoolean:
2841 case Primitive::kPrimByte:
2842 case Primitive::kPrimShort:
2843 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08002844 case Primitive::kPrimInt:
Alexey Frunze4dda3372015-06-01 18:31:49 -07002845 case Primitive::kPrimLong: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002846 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002847 Location rhs_location = locations->InAt(1);
2848 bool use_imm = rhs_location.IsConstant();
2849 GpuRegister rhs = ZERO;
2850 if (use_imm) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00002851 if (in_type == Primitive::kPrimLong) {
Aart Bika19616e2016-02-01 18:57:58 -08002852 int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
2853 if (value != 0) {
2854 rhs = AT;
2855 __ LoadConst64(rhs, value);
2856 }
Roland Levillaina5c4a402016-03-15 15:02:50 +00002857 } else {
2858 int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
2859 if (value != 0) {
2860 rhs = AT;
2861 __ LoadConst32(rhs, value);
2862 }
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002863 }
2864 } else {
2865 rhs = rhs_location.AsRegister<GpuRegister>();
2866 }
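      // Sketch of the sequence below: res = (rhs < lhs) - (lhs < rhs), which yields
      // 1, 0 or -1 for lhs > rhs, lhs == rhs and lhs < rhs respectively.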
Alexey Frunze4dda3372015-06-01 18:31:49 -07002867 __ Slt(TMP, lhs, rhs);
Alexey Frunze299a9392015-12-08 16:08:02 -08002868 __ Slt(res, rhs, lhs);
2869 __ Subu(res, res, TMP);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002870 break;
2871 }
2872
Alexey Frunze299a9392015-12-08 16:08:02 -08002873 case Primitive::kPrimFloat: {
2874 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
2875 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
2876 Mips64Label done;
2877 __ CmpEqS(FTMP, lhs, rhs);
2878 __ LoadConst32(res, 0);
2879 __ Bc1nez(FTMP, &done);
Roland Levillain32ca3752016-02-17 16:49:37 +00002880 if (instruction->IsGtBias()) {
Alexey Frunze299a9392015-12-08 16:08:02 -08002881 __ CmpLtS(FTMP, lhs, rhs);
2882 __ LoadConst32(res, -1);
2883 __ Bc1nez(FTMP, &done);
2884 __ LoadConst32(res, 1);
2885 } else {
2886 __ CmpLtS(FTMP, rhs, lhs);
2887 __ LoadConst32(res, 1);
2888 __ Bc1nez(FTMP, &done);
2889 __ LoadConst32(res, -1);
2890 }
2891 __ Bind(&done);
2892 break;
2893 }
2894
Alexey Frunze4dda3372015-06-01 18:31:49 -07002895 case Primitive::kPrimDouble: {
Alexey Frunze299a9392015-12-08 16:08:02 -08002896 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
2897 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
2898 Mips64Label done;
2899 __ CmpEqD(FTMP, lhs, rhs);
2900 __ LoadConst32(res, 0);
2901 __ Bc1nez(FTMP, &done);
Roland Levillain32ca3752016-02-17 16:49:37 +00002902 if (instruction->IsGtBias()) {
Alexey Frunze299a9392015-12-08 16:08:02 -08002903 __ CmpLtD(FTMP, lhs, rhs);
2904 __ LoadConst32(res, -1);
2905 __ Bc1nez(FTMP, &done);
2906 __ LoadConst32(res, 1);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002907 } else {
Alexey Frunze299a9392015-12-08 16:08:02 -08002908 __ CmpLtD(FTMP, rhs, lhs);
2909 __ LoadConst32(res, 1);
2910 __ Bc1nez(FTMP, &done);
2911 __ LoadConst32(res, -1);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002912 }
Alexey Frunze299a9392015-12-08 16:08:02 -08002913 __ Bind(&done);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002914 break;
2915 }
2916
2917 default:
2918 LOG(FATAL) << "Unimplemented compare type " << in_type;
2919 }
2920}
2921
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002922void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002923 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunze299a9392015-12-08 16:08:02 -08002924 switch (instruction->InputAt(0)->GetType()) {
2925 default:
2926 case Primitive::kPrimLong:
2927 locations->SetInAt(0, Location::RequiresRegister());
2928 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2929 break;
2930
2931 case Primitive::kPrimFloat:
2932 case Primitive::kPrimDouble:
2933 locations->SetInAt(0, Location::RequiresFpuRegister());
2934 locations->SetInAt(1, Location::RequiresFpuRegister());
2935 break;
2936 }
David Brazdilb3e773e2016-01-26 11:28:37 +00002937 if (!instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002938 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2939 }
2940}
2941
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00002942void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00002943 if (instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002944 return;
2945 }
2946
Alexey Frunze299a9392015-12-08 16:08:02 -08002947 Primitive::Type type = instruction->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002948 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08002949 switch (type) {
2950 default:
2951 // Integer case.
2952 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
2953 return;
2954 case Primitive::kPrimLong:
2955 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
2956 return;
Alexey Frunze299a9392015-12-08 16:08:02 -08002957 case Primitive::kPrimFloat:
2958 case Primitive::kPrimDouble:
Tijana Jakovljevic43758192016-12-30 09:23:01 +01002959 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
2960 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002961 }
2962}
2963
Alexey Frunzec857c742015-09-23 15:12:39 -07002964void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
2965 DCHECK(instruction->IsDiv() || instruction->IsRem());
2966 Primitive::Type type = instruction->GetResultType();
2967
2968 LocationSummary* locations = instruction->GetLocations();
2969 Location second = locations->InAt(1);
2970 DCHECK(second.IsConstant());
2971
2972 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2973 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
2974 int64_t imm = Int64FromConstant(second.GetConstant());
2975 DCHECK(imm == 1 || imm == -1);
2976
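  // x % +/-1 is always 0, x / 1 is x, and x / -1 is -x. The two's-complement
  // negation below also matches Java semantics in the overflow case, where
  // MIN_VALUE / -1 wraps back to MIN_VALUE.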
2977 if (instruction->IsRem()) {
2978 __ Move(out, ZERO);
2979 } else {
2980 if (imm == -1) {
2981 if (type == Primitive::kPrimInt) {
2982 __ Subu(out, ZERO, dividend);
2983 } else {
2984 DCHECK_EQ(type, Primitive::kPrimLong);
2985 __ Dsubu(out, ZERO, dividend);
2986 }
2987 } else if (out != dividend) {
2988 __ Move(out, dividend);
2989 }
2990 }
2991}
2992
2993void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
2994 DCHECK(instruction->IsDiv() || instruction->IsRem());
2995 Primitive::Type type = instruction->GetResultType();
2996
2997 LocationSummary* locations = instruction->GetLocations();
2998 Location second = locations->InAt(1);
2999 DCHECK(second.IsConstant());
3000
3001 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3002 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3003 int64_t imm = Int64FromConstant(second.GetConstant());
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003004 uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
Alexey Frunzec857c742015-09-23 15:12:39 -07003005 int ctz_imm = CTZ(abs_imm);
3006
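  // Rounding sketch for a 32-bit division by 8 (ctz_imm == 3):
  //   sra  TMP, dividend, 31       // TMP = 0 or -1 (sign mask)
  //   srl  TMP, TMP, 29            // TMP = 0 or 7 (bias)
  //   addu out, dividend, TMP
  //   sra  out, out, 3
  // Adding the bias before the arithmetic shift makes negative dividends round
  // toward zero instead of toward negative infinity.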
3007 if (instruction->IsDiv()) {
3008 if (type == Primitive::kPrimInt) {
3009 if (ctz_imm == 1) {
3010 // Fast path for division by +/-2, which is very common.
3011 __ Srl(TMP, dividend, 31);
3012 } else {
3013 __ Sra(TMP, dividend, 31);
3014 __ Srl(TMP, TMP, 32 - ctz_imm);
3015 }
3016 __ Addu(out, dividend, TMP);
3017 __ Sra(out, out, ctz_imm);
3018 if (imm < 0) {
3019 __ Subu(out, ZERO, out);
3020 }
3021 } else {
3022 DCHECK_EQ(type, Primitive::kPrimLong);
3023 if (ctz_imm == 1) {
3024 // Fast path for division by +/-2, which is very common.
3025 __ Dsrl32(TMP, dividend, 31);
3026 } else {
3027 __ Dsra32(TMP, dividend, 31);
3028 if (ctz_imm > 32) {
3029 __ Dsrl(TMP, TMP, 64 - ctz_imm);
3030 } else {
3031 __ Dsrl32(TMP, TMP, 32 - ctz_imm);
3032 }
3033 }
3034 __ Daddu(out, dividend, TMP);
3035 if (ctz_imm < 32) {
3036 __ Dsra(out, out, ctz_imm);
3037 } else {
3038 __ Dsra32(out, out, ctz_imm - 32);
3039 }
3040 if (imm < 0) {
3041 __ Dsubu(out, ZERO, out);
3042 }
3043 }
3044 } else {
3045 if (type == Primitive::kPrimInt) {
3046 if (ctz_imm == 1) {
3047 // Fast path for modulo +/-2, which is very common.
3048 __ Sra(TMP, dividend, 31);
3049 __ Subu(out, dividend, TMP);
3050 __ Andi(out, out, 1);
3051 __ Addu(out, out, TMP);
3052 } else {
3053 __ Sra(TMP, dividend, 31);
3054 __ Srl(TMP, TMP, 32 - ctz_imm);
3055 __ Addu(out, dividend, TMP);
3056 if (IsUint<16>(abs_imm - 1)) {
3057 __ Andi(out, out, abs_imm - 1);
3058 } else {
3059 __ Sll(out, out, 32 - ctz_imm);
3060 __ Srl(out, out, 32 - ctz_imm);
3061 }
3062 __ Subu(out, out, TMP);
3063 }
3064 } else {
3065 DCHECK_EQ(type, Primitive::kPrimLong);
3066 if (ctz_imm == 1) {
3067 // Fast path for modulo +/-2, which is very common.
3068 __ Dsra32(TMP, dividend, 31);
3069 __ Dsubu(out, dividend, TMP);
3070 __ Andi(out, out, 1);
3071 __ Daddu(out, out, TMP);
3072 } else {
3073 __ Dsra32(TMP, dividend, 31);
3074 if (ctz_imm > 32) {
3075 __ Dsrl(TMP, TMP, 64 - ctz_imm);
3076 } else {
3077 __ Dsrl32(TMP, TMP, 32 - ctz_imm);
3078 }
3079 __ Daddu(out, dividend, TMP);
3080 if (IsUint<16>(abs_imm - 1)) {
3081 __ Andi(out, out, abs_imm - 1);
3082 } else {
3083 if (ctz_imm > 32) {
3084 __ Dsll(out, out, 64 - ctz_imm);
3085 __ Dsrl(out, out, 64 - ctz_imm);
3086 } else {
3087 __ Dsll32(out, out, 32 - ctz_imm);
3088 __ Dsrl32(out, out, 32 - ctz_imm);
3089 }
3090 }
3091 __ Dsubu(out, out, TMP);
3092 }
3093 }
3094 }
3095}
3096
3097void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3098 DCHECK(instruction->IsDiv() || instruction->IsRem());
3099
3100 LocationSummary* locations = instruction->GetLocations();
3101 Location second = locations->InAt(1);
3102 DCHECK(second.IsConstant());
3103
3104 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3105 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3106 int64_t imm = Int64FromConstant(second.GetConstant());
3107
3108 Primitive::Type type = instruction->GetResultType();
3109 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;
3110
3111 int64_t magic;
3112 int shift;
3113 CalculateMagicAndShiftForDivRem(imm,
3114 (type == Primitive::kPrimLong),
3115 &magic,
3116 &shift);
3117
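  // Worked example (a sketch using the standard Hacker's Delight constants, which
  // CalculateMagicAndShiftForDivRem is assumed to produce): for a 32-bit division
  // by 7, magic == 0x92492493 and shift == 2, so the emitted sequence computes
  //   TMP = MuhR6(magic, dividend) + dividend;  // high 32 bits; magic < 0, imm > 0
  //   TMP = TMP >> 2;                           // arithmetic shift
  //   out = TMP - (TMP >> 31);                  // add 1 when TMP is negative
  // which equals dividend / 7 rounded toward zero.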
3118 if (type == Primitive::kPrimInt) {
3119 __ LoadConst32(TMP, magic);
3120 __ MuhR6(TMP, dividend, TMP);
3121
3122 if (imm > 0 && magic < 0) {
3123 __ Addu(TMP, TMP, dividend);
3124 } else if (imm < 0 && magic > 0) {
3125 __ Subu(TMP, TMP, dividend);
3126 }
3127
3128 if (shift != 0) {
3129 __ Sra(TMP, TMP, shift);
3130 }
3131
3132 if (instruction->IsDiv()) {
3133 __ Sra(out, TMP, 31);
3134 __ Subu(out, TMP, out);
3135 } else {
3136 __ Sra(AT, TMP, 31);
3137 __ Subu(AT, TMP, AT);
3138 __ LoadConst32(TMP, imm);
3139 __ MulR6(TMP, AT, TMP);
3140 __ Subu(out, dividend, TMP);
3141 }
3142 } else {
3143 __ LoadConst64(TMP, magic);
3144 __ Dmuh(TMP, dividend, TMP);
3145
3146 if (imm > 0 && magic < 0) {
3147 __ Daddu(TMP, TMP, dividend);
3148 } else if (imm < 0 && magic > 0) {
3149 __ Dsubu(TMP, TMP, dividend);
3150 }
3151
3152 if (shift >= 32) {
3153 __ Dsra32(TMP, TMP, shift - 32);
3154 } else if (shift > 0) {
3155 __ Dsra(TMP, TMP, shift);
3156 }
3157
3158 if (instruction->IsDiv()) {
3159 __ Dsra32(out, TMP, 31);
3160 __ Dsubu(out, TMP, out);
3161 } else {
3162 __ Dsra32(AT, TMP, 31);
3163 __ Dsubu(AT, TMP, AT);
3164 __ LoadConst64(TMP, imm);
3165 __ Dmul(TMP, AT, TMP);
3166 __ Dsubu(out, dividend, TMP);
3167 }
3168 }
3169}
3170
3171void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3172 DCHECK(instruction->IsDiv() || instruction->IsRem());
3173 Primitive::Type type = instruction->GetResultType();
3174 DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong) << type;
3175
3176 LocationSummary* locations = instruction->GetLocations();
3177 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3178 Location second = locations->InAt(1);
3179
3180 if (second.IsConstant()) {
3181 int64_t imm = Int64FromConstant(second.GetConstant());
3182 if (imm == 0) {
3183      // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3184 } else if (imm == 1 || imm == -1) {
3185 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003186 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003187 DivRemByPowerOfTwo(instruction);
3188 } else {
3189 DCHECK(imm <= -2 || imm >= 2);
3190 GenerateDivRemWithAnyConstant(instruction);
3191 }
3192 } else {
3193 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3194 GpuRegister divisor = second.AsRegister<GpuRegister>();
3195 if (instruction->IsDiv()) {
3196 if (type == Primitive::kPrimInt)
3197 __ DivR6(out, dividend, divisor);
3198 else
3199 __ Ddiv(out, dividend, divisor);
3200 } else {
3201 if (type == Primitive::kPrimInt)
3202 __ ModR6(out, dividend, divisor);
3203 else
3204 __ Dmod(out, dividend, divisor);
3205 }
3206 }
3207}
3208
Alexey Frunze4dda3372015-06-01 18:31:49 -07003209void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
3210 LocationSummary* locations =
3211 new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
3212 switch (div->GetResultType()) {
3213 case Primitive::kPrimInt:
3214 case Primitive::kPrimLong:
3215 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07003216 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003217 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3218 break;
3219
3220 case Primitive::kPrimFloat:
3221 case Primitive::kPrimDouble:
3222 locations->SetInAt(0, Location::RequiresFpuRegister());
3223 locations->SetInAt(1, Location::RequiresFpuRegister());
3224 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3225 break;
3226
3227 default:
3228 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3229 }
3230}
3231
3232void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
3233 Primitive::Type type = instruction->GetType();
3234 LocationSummary* locations = instruction->GetLocations();
3235
3236 switch (type) {
3237 case Primitive::kPrimInt:
Alexey Frunzec857c742015-09-23 15:12:39 -07003238 case Primitive::kPrimLong:
3239 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003240 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003241 case Primitive::kPrimFloat:
3242 case Primitive::kPrimDouble: {
3243 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3244 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3245 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
3246 if (type == Primitive::kPrimFloat)
3247 __ DivS(dst, lhs, rhs);
3248 else
3249 __ DivD(dst, lhs, rhs);
3250 break;
3251 }
3252 default:
3253 LOG(FATAL) << "Unexpected div type " << type;
3254 }
3255}
3256
3257void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003258 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003259 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003260}
3261
3262void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3263 SlowPathCodeMIPS64* slow_path =
3264 new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS64(instruction);
3265 codegen_->AddSlowPath(slow_path);
3266 Location value = instruction->GetLocations()->InAt(0);
3267
3268 Primitive::Type type = instruction->GetType();
3269
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003270 if (!Primitive::IsIntegralType(type)) {
3271 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003272 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003273 }
3274
3275 if (value.IsConstant()) {
3276 int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
3277 if (divisor == 0) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003278 __ Bc(slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003279 } else {
3280      // A division by a non-zero constant is valid. We don't need to perform
3281      // any check, so simply fall through.
3282 }
3283 } else {
3284 __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
3285 }
3286}
3287
3288void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
3289 LocationSummary* locations =
3290 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3291 locations->SetOut(Location::ConstantLocation(constant));
3292}
3293
3294void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
3295 // Will be generated at use site.
3296}
3297
3298void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
3299 exit->SetLocations(nullptr);
3300}
3301
3302void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
3303}
3304
3305void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
3306 LocationSummary* locations =
3307 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3308 locations->SetOut(Location::ConstantLocation(constant));
3309}
3310
3311void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
3312 // Will be generated at use site.
3313}
3314
David Brazdilfc6a86a2015-06-26 10:33:45 +00003315void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003316 DCHECK(!successor->IsExitBlock());
3317 HBasicBlock* block = got->GetBlock();
3318 HInstruction* previous = got->GetPrevious();
3319 HLoopInformation* info = block->GetLoopInformation();
3320
3321 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
3322 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3323 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3324 return;
3325 }
3326 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3327 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3328 }
3329 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003330 __ Bc(codegen_->GetLabelOf(successor));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003331 }
3332}
3333
David Brazdilfc6a86a2015-06-26 10:33:45 +00003334void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
3335 got->SetLocations(nullptr);
3336}
3337
3338void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
3339 HandleGoto(got, got->GetSuccessor());
3340}
3341
3342void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
3343 try_boundary->SetLocations(nullptr);
3344}
3345
3346void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
3347 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3348 if (!successor->IsExitBlock()) {
3349 HandleGoto(try_boundary, successor);
3350 }
3351}
3352
Alexey Frunze299a9392015-12-08 16:08:02 -08003353void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
3354 bool is64bit,
3355 LocationSummary* locations) {
3356 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3357 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
3358 Location rhs_location = locations->InAt(1);
3359 GpuRegister rhs_reg = ZERO;
3360 int64_t rhs_imm = 0;
3361 bool use_imm = rhs_location.IsConstant();
3362 if (use_imm) {
3363 if (is64bit) {
3364 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
3365 } else {
3366 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3367 }
3368 } else {
3369 rhs_reg = rhs_location.AsRegister<GpuRegister>();
3370 }
3371 int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);
3372
3373 switch (cond) {
3374 case kCondEQ:
3375 case kCondNE:
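      // Strategy sketch: with a small immediate, subtract it (Daddiu with -rhs_imm)
      // and test the difference against zero; with a 16-bit unsigned immediate, Xori
      // works as well since lhs ^ rhs == 0 exactly when lhs == rhs; otherwise
      // materialize rhs in TMP and Xor. Sltiu(dst, dst, 1) then computes "== 0" and
      // Sltu(dst, ZERO, dst) computes "!= 0".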
Goran Jakovljevicdb3deee2016-12-28 14:33:21 +01003376 if (use_imm && IsInt<16>(-rhs_imm)) {
3377 if (rhs_imm == 0) {
3378 if (cond == kCondEQ) {
3379 __ Sltiu(dst, lhs, 1);
3380 } else {
3381 __ Sltu(dst, ZERO, lhs);
3382 }
3383 } else {
3384 if (is64bit) {
3385 __ Daddiu(dst, lhs, -rhs_imm);
3386 } else {
3387 __ Addiu(dst, lhs, -rhs_imm);
3388 }
3389 if (cond == kCondEQ) {
3390 __ Sltiu(dst, dst, 1);
3391 } else {
3392 __ Sltu(dst, ZERO, dst);
3393 }
Alexey Frunze299a9392015-12-08 16:08:02 -08003394 }
Alexey Frunze299a9392015-12-08 16:08:02 -08003395 } else {
Goran Jakovljevicdb3deee2016-12-28 14:33:21 +01003396 if (use_imm && IsUint<16>(rhs_imm)) {
3397 __ Xori(dst, lhs, rhs_imm);
3398 } else {
3399 if (use_imm) {
3400 rhs_reg = TMP;
3401 __ LoadConst64(rhs_reg, rhs_imm);
3402 }
3403 __ Xor(dst, lhs, rhs_reg);
3404 }
3405 if (cond == kCondEQ) {
3406 __ Sltiu(dst, dst, 1);
3407 } else {
3408 __ Sltu(dst, ZERO, dst);
3409 }
Alexey Frunze299a9392015-12-08 16:08:02 -08003410 }
3411 break;
3412
3413 case kCondLT:
3414 case kCondGE:
3415 if (use_imm && IsInt<16>(rhs_imm)) {
3416 __ Slti(dst, lhs, rhs_imm);
3417 } else {
3418 if (use_imm) {
3419 rhs_reg = TMP;
3420 __ LoadConst64(rhs_reg, rhs_imm);
3421 }
3422 __ Slt(dst, lhs, rhs_reg);
3423 }
3424 if (cond == kCondGE) {
3425 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3426 // only the slt instruction but no sge.
3427 __ Xori(dst, dst, 1);
3428 }
3429 break;
3430
3431 case kCondLE:
3432 case kCondGT:
3433 if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
3434 // Simulate lhs <= rhs via lhs < rhs + 1.
3435 __ Slti(dst, lhs, rhs_imm_plus_one);
3436 if (cond == kCondGT) {
3437 // Simulate lhs > rhs via !(lhs <= rhs) since there's
3438 // only the slti instruction but no sgti.
3439 __ Xori(dst, dst, 1);
3440 }
3441 } else {
3442 if (use_imm) {
3443 rhs_reg = TMP;
3444 __ LoadConst64(rhs_reg, rhs_imm);
3445 }
3446 __ Slt(dst, rhs_reg, lhs);
3447 if (cond == kCondLE) {
3448 // Simulate lhs <= rhs via !(rhs < lhs) since there's
3449 // only the slt instruction but no sle.
3450 __ Xori(dst, dst, 1);
3451 }
3452 }
3453 break;
3454
3455 case kCondB:
3456 case kCondAE:
3457 if (use_imm && IsInt<16>(rhs_imm)) {
3458 // Sltiu sign-extends its 16-bit immediate operand before
3459 // the comparison and thus lets us compare directly with
3460 // unsigned values in the ranges [0, 0x7fff] and
3461 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3462 __ Sltiu(dst, lhs, rhs_imm);
3463 } else {
3464 if (use_imm) {
3465 rhs_reg = TMP;
3466 __ LoadConst64(rhs_reg, rhs_imm);
3467 }
3468 __ Sltu(dst, lhs, rhs_reg);
3469 }
3470 if (cond == kCondAE) {
3471 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3472 // only the sltu instruction but no sgeu.
3473 __ Xori(dst, dst, 1);
3474 }
3475 break;
3476
3477 case kCondBE:
3478 case kCondA:
3479 if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
3480 // Simulate lhs <= rhs via lhs < rhs + 1.
3481 // Note that this only works if rhs + 1 does not overflow
3482 // to 0, hence the check above.
3483 // Sltiu sign-extends its 16-bit immediate operand before
3484 // the comparison and thus lets us compare directly with
3485 // unsigned values in the ranges [0, 0x7fff] and
3486 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3487 __ Sltiu(dst, lhs, rhs_imm_plus_one);
3488 if (cond == kCondA) {
3489 // Simulate lhs > rhs via !(lhs <= rhs) since there's
3490 // only the sltiu instruction but no sgtiu.
3491 __ Xori(dst, dst, 1);
3492 }
3493 } else {
3494 if (use_imm) {
3495 rhs_reg = TMP;
3496 __ LoadConst64(rhs_reg, rhs_imm);
3497 }
3498 __ Sltu(dst, rhs_reg, lhs);
3499 if (cond == kCondBE) {
3500 // Simulate lhs <= rhs via !(rhs < lhs) since there's
3501 // only the sltu instruction but no sleu.
3502 __ Xori(dst, dst, 1);
3503 }
3504 }
3505 break;
3506 }
3507}
3508
3509void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
3510 bool is64bit,
3511 LocationSummary* locations,
3512 Mips64Label* label) {
3513 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
3514 Location rhs_location = locations->InAt(1);
3515 GpuRegister rhs_reg = ZERO;
3516 int64_t rhs_imm = 0;
3517 bool use_imm = rhs_location.IsConstant();
3518 if (use_imm) {
3519 if (is64bit) {
3520 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
3521 } else {
3522 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3523 }
3524 } else {
3525 rhs_reg = rhs_location.AsRegister<GpuRegister>();
3526 }
3527
3528 if (use_imm && rhs_imm == 0) {
3529 switch (cond) {
3530 case kCondEQ:
3531      case kCondBE:  // unsigned <= 0 holds only when lhs is zero
3532 __ Beqzc(lhs, label);
3533 break;
3534 case kCondNE:
3535      case kCondA:  // unsigned > 0 holds only when lhs is non-zero
3536 __ Bnezc(lhs, label);
3537 break;
3538 case kCondLT:
3539 __ Bltzc(lhs, label);
3540 break;
3541 case kCondGE:
3542 __ Bgezc(lhs, label);
3543 break;
3544 case kCondLE:
3545 __ Blezc(lhs, label);
3546 break;
3547 case kCondGT:
3548 __ Bgtzc(lhs, label);
3549 break;
3550 case kCondB: // always false
3551 break;
3552 case kCondAE: // always true
3553 __ Bc(label);
3554 break;
3555 }
3556 } else {
3557 if (use_imm) {
3558 rhs_reg = TMP;
3559 __ LoadConst64(rhs_reg, rhs_imm);
3560 }
3561 switch (cond) {
3562 case kCondEQ:
3563 __ Beqc(lhs, rhs_reg, label);
3564 break;
3565 case kCondNE:
3566 __ Bnec(lhs, rhs_reg, label);
3567 break;
3568 case kCondLT:
3569 __ Bltc(lhs, rhs_reg, label);
3570 break;
3571 case kCondGE:
3572 __ Bgec(lhs, rhs_reg, label);
3573 break;
3574 case kCondLE:
3575 __ Bgec(rhs_reg, lhs, label);
3576 break;
3577 case kCondGT:
3578 __ Bltc(rhs_reg, lhs, label);
3579 break;
3580 case kCondB:
3581 __ Bltuc(lhs, rhs_reg, label);
3582 break;
3583 case kCondAE:
3584 __ Bgeuc(lhs, rhs_reg, label);
3585 break;
3586 case kCondBE:
3587 __ Bgeuc(rhs_reg, lhs, label);
3588 break;
3589 case kCondA:
3590 __ Bltuc(rhs_reg, lhs, label);
3591 break;
3592 }
3593 }
3594}
3595
Tijana Jakovljevic43758192016-12-30 09:23:01 +01003596void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
3597 bool gt_bias,
3598 Primitive::Type type,
3599 LocationSummary* locations) {
3600 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3601 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3602 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
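  // Assuming MIPS R6 CMP.cond.fmt semantics, the compare writes all ones to FTMP
  // when the condition holds and all zeros otherwise, so after Mfc1 the GPR holds
  // -1 or 0. Andi(dst, dst, 1) then extracts 1/0, and for kCondNE
  // Addiu(dst, dst, 1) maps -1/0 to 0/1.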
3603 if (type == Primitive::kPrimFloat) {
3604 switch (cond) {
3605 case kCondEQ:
3606 __ CmpEqS(FTMP, lhs, rhs);
3607 __ Mfc1(dst, FTMP);
3608 __ Andi(dst, dst, 1);
3609 break;
3610 case kCondNE:
3611 __ CmpEqS(FTMP, lhs, rhs);
3612 __ Mfc1(dst, FTMP);
3613 __ Addiu(dst, dst, 1);
3614 break;
3615 case kCondLT:
3616 if (gt_bias) {
3617 __ CmpLtS(FTMP, lhs, rhs);
3618 } else {
3619 __ CmpUltS(FTMP, lhs, rhs);
3620 }
3621 __ Mfc1(dst, FTMP);
3622 __ Andi(dst, dst, 1);
3623 break;
3624 case kCondLE:
3625 if (gt_bias) {
3626 __ CmpLeS(FTMP, lhs, rhs);
3627 } else {
3628 __ CmpUleS(FTMP, lhs, rhs);
3629 }
3630 __ Mfc1(dst, FTMP);
3631 __ Andi(dst, dst, 1);
3632 break;
3633 case kCondGT:
3634 if (gt_bias) {
3635 __ CmpUltS(FTMP, rhs, lhs);
3636 } else {
3637 __ CmpLtS(FTMP, rhs, lhs);
3638 }
3639 __ Mfc1(dst, FTMP);
3640 __ Andi(dst, dst, 1);
3641 break;
3642 case kCondGE:
3643 if (gt_bias) {
3644 __ CmpUleS(FTMP, rhs, lhs);
3645 } else {
3646 __ CmpLeS(FTMP, rhs, lhs);
3647 }
3648 __ Mfc1(dst, FTMP);
3649 __ Andi(dst, dst, 1);
3650 break;
3651 default:
3652 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
3653 UNREACHABLE();
3654 }
3655 } else {
3656 DCHECK_EQ(type, Primitive::kPrimDouble);
3657 switch (cond) {
3658 case kCondEQ:
3659 __ CmpEqD(FTMP, lhs, rhs);
3660 __ Mfc1(dst, FTMP);
3661 __ Andi(dst, dst, 1);
3662 break;
3663 case kCondNE:
3664 __ CmpEqD(FTMP, lhs, rhs);
3665 __ Mfc1(dst, FTMP);
3666 __ Addiu(dst, dst, 1);
3667 break;
3668 case kCondLT:
3669 if (gt_bias) {
3670 __ CmpLtD(FTMP, lhs, rhs);
3671 } else {
3672 __ CmpUltD(FTMP, lhs, rhs);
3673 }
3674 __ Mfc1(dst, FTMP);
3675 __ Andi(dst, dst, 1);
3676 break;
3677 case kCondLE:
3678 if (gt_bias) {
3679 __ CmpLeD(FTMP, lhs, rhs);
3680 } else {
3681 __ CmpUleD(FTMP, lhs, rhs);
3682 }
3683 __ Mfc1(dst, FTMP);
3684 __ Andi(dst, dst, 1);
3685 break;
3686 case kCondGT:
3687 if (gt_bias) {
3688 __ CmpUltD(FTMP, rhs, lhs);
3689 } else {
3690 __ CmpLtD(FTMP, rhs, lhs);
3691 }
3692 __ Mfc1(dst, FTMP);
3693 __ Andi(dst, dst, 1);
3694 break;
3695 case kCondGE:
3696 if (gt_bias) {
3697 __ CmpUleD(FTMP, rhs, lhs);
3698 } else {
3699 __ CmpLeD(FTMP, rhs, lhs);
3700 }
3701 __ Mfc1(dst, FTMP);
3702 __ Andi(dst, dst, 1);
3703 break;
3704 default:
3705 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
3706 UNREACHABLE();
3707 }
3708 }
3709}
3710
Alexey Frunze299a9392015-12-08 16:08:02 -08003711void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
3712 bool gt_bias,
3713 Primitive::Type type,
3714 LocationSummary* locations,
3715 Mips64Label* label) {
3716 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3717 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
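  // As in GenerateFpCompare above, the compare leaves all ones or all zeros in FTMP;
  // Bc1nez branches when FTMP is non-zero (condition true) and Bc1eqz when it is
  // zero, which is how kCondNE inverts CmpEqS/CmpEqD below.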
3718 if (type == Primitive::kPrimFloat) {
3719 switch (cond) {
3720 case kCondEQ:
3721 __ CmpEqS(FTMP, lhs, rhs);
3722 __ Bc1nez(FTMP, label);
3723 break;
3724 case kCondNE:
3725 __ CmpEqS(FTMP, lhs, rhs);
3726 __ Bc1eqz(FTMP, label);
3727 break;
3728 case kCondLT:
3729 if (gt_bias) {
3730 __ CmpLtS(FTMP, lhs, rhs);
3731 } else {
3732 __ CmpUltS(FTMP, lhs, rhs);
3733 }
3734 __ Bc1nez(FTMP, label);
3735 break;
3736 case kCondLE:
3737 if (gt_bias) {
3738 __ CmpLeS(FTMP, lhs, rhs);
3739 } else {
3740 __ CmpUleS(FTMP, lhs, rhs);
3741 }
3742 __ Bc1nez(FTMP, label);
3743 break;
3744 case kCondGT:
3745 if (gt_bias) {
3746 __ CmpUltS(FTMP, rhs, lhs);
3747 } else {
3748 __ CmpLtS(FTMP, rhs, lhs);
3749 }
3750 __ Bc1nez(FTMP, label);
3751 break;
3752 case kCondGE:
3753 if (gt_bias) {
3754 __ CmpUleS(FTMP, rhs, lhs);
3755 } else {
3756 __ CmpLeS(FTMP, rhs, lhs);
3757 }
3758 __ Bc1nez(FTMP, label);
3759 break;
3760 default:
3761 LOG(FATAL) << "Unexpected non-floating-point condition";
3762 }
3763 } else {
3764 DCHECK_EQ(type, Primitive::kPrimDouble);
3765 switch (cond) {
3766 case kCondEQ:
3767 __ CmpEqD(FTMP, lhs, rhs);
3768 __ Bc1nez(FTMP, label);
3769 break;
3770 case kCondNE:
3771 __ CmpEqD(FTMP, lhs, rhs);
3772 __ Bc1eqz(FTMP, label);
3773 break;
3774 case kCondLT:
3775 if (gt_bias) {
3776 __ CmpLtD(FTMP, lhs, rhs);
3777 } else {
3778 __ CmpUltD(FTMP, lhs, rhs);
3779 }
3780 __ Bc1nez(FTMP, label);
3781 break;
3782 case kCondLE:
3783 if (gt_bias) {
3784 __ CmpLeD(FTMP, lhs, rhs);
3785 } else {
3786 __ CmpUleD(FTMP, lhs, rhs);
3787 }
3788 __ Bc1nez(FTMP, label);
3789 break;
3790 case kCondGT:
3791 if (gt_bias) {
3792 __ CmpUltD(FTMP, rhs, lhs);
3793 } else {
3794 __ CmpLtD(FTMP, rhs, lhs);
3795 }
3796 __ Bc1nez(FTMP, label);
3797 break;
3798 case kCondGE:
3799 if (gt_bias) {
3800 __ CmpUleD(FTMP, rhs, lhs);
3801 } else {
3802 __ CmpLeD(FTMP, rhs, lhs);
3803 }
3804 __ Bc1nez(FTMP, label);
3805 break;
3806 default:
3807 LOG(FATAL) << "Unexpected non-floating-point condition";
3808 }
3809 }
3810}
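
// For illustration, a gt_bias float `lhs < rhs` handled above reduces to
// (a sketch; register names symbolic):
//
//   cmp.lt.s ftmp, lhs, rhs   # all ones iff lhs < rhs; false on NaN
//   bc1nez   ftmp, label      # branch only on an ordered less-than
//
// The non-gt_bias variant uses cmp.ult.s instead so that an unordered
// (NaN) comparison also takes the branch; this is how the two NaN biases
// of HCondition are realized without an explicit NaN test.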

void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           Mips64Label* true_target,
                                                           Mips64Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ Bc(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ Bc(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //   (1) true_target == nullptr && false_target != nullptr
  //         - opposite condition true => branch to false_target
  //   (2) true_target != nullptr && false_target == nullptr
  //         - condition true => branch to true_target
  //   (3) true_target != nullptr && false_target != nullptr
  //         - condition true => branch to true_target
  //         - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
    } else {
      __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    Primitive::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    Mips64Label* branch_target = true_target;

    if (true_target == nullptr) {
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
        break;
      case Primitive::kPrimLong:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ Bc(false_target);
  }
}
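
// Example of pattern (1) above: for an `if (a == b)` whose true successor
// is the fall-through block, VisitIf passes true_target == nullptr, so the
// condition is flipped to kCondNE and a single compact `bnec a, b,
// false_target` suffices instead of a branch-over-branch pair (a sketch;
// the exact encoding is chosen in GenerateIntLongCompareAndBranch).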

void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
  HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
  HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
  Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
      nullptr : codegen_->GetLabelOf(true_successor);
  Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
      nullptr : codegen_->GetLabelOf(false_successor);
  GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
}

void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeMIPS64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}

void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  LocationSummary* locations = new (GetGraph()->GetArena())
      LocationSummary(flag, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
  __ LoadFromOffset(kLoadWord,
                    flag->GetLocations()->Out().AsRegister<GpuRegister>(),
                    SP,
                    codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
}
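
// An explanatory aside (not from the original comments): the slot read
// above is reserved in the frame whenever the graph contains
// HShouldDeoptimizeFlag, and the runtime sets it when a CHA
// (class-hierarchy-analysis) assumption behind a devirtualized call is
// invalidated, which is why a plain word load off SP suffices here.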

void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
  if (Primitive::IsFloatingPointType(select->GetType())) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetInAt(1, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RequiresRegister());
  }
  if (IsBooleanValueOrMaterializedCondition(select->GetCondition())) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
  locations->SetOut(Location::SameAsFirstInput());
}

void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Mips64Label false_target;
  GenerateTestAndBranch(select,
                        /* condition_input_index */ 2,
                        /* true_target */ nullptr,
                        &false_target);
  codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
  __ Bind(&false_target);
}
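
// HSelect is therefore lowered with a short forward branch rather than a
// conditional move: the output is pre-assigned to input 0 via
// SameAsFirstInput, and the move of input 1 is skipped when the condition
// is false. In pseudocode:
//
//   out = false_value;             // implicit, out shares input 0's register
//   if (!cond) goto false_target;  // GenerateTestAndBranch, inverted
//   out = true_value;              // MoveLocation
//   false_target: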

void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetArena()) LocationSummary(info);
}

void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}

void CodeGeneratorMIPS64::GenerateNop() {
  __ Nop();
}

void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  Primitive::Type field_type = field_info.GetFieldType();
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      object_field_get_with_read_barrier
          ? LocationSummary::kCallOnSlowPath
          : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps in the case of an object field get with
    // read barriers enabled: we do not want the move to overwrite the
    // object's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_field_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  switch (type) {
    case Primitive::kPrimBoolean:
      load_type = kLoadUnsignedByte;
      break;
    case Primitive::kPrimByte:
      load_type = kLoadSignedByte;
      break;
    case Primitive::kPrimShort:
      load_type = kLoadSignedHalfword;
      break;
    case Primitive::kPrimChar:
      load_type = kLoadUnsignedHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      load_type = kLoadWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      load_type = kLoadDoubleword;
      break;
    case Primitive::kPrimNot:
      load_type = kLoadUnsignedWord;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!Primitive::IsFloatingPointType(type)) {
    DCHECK(dst_loc.IsRegister());
    GpuRegister dst = dst_loc.AsRegister<GpuRegister>();
    if (type == Primitive::kPrimNot) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        Location temp_loc = locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadUnsignedWord, dst, obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else {
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    }
  } else {
    DCHECK(dst_loc.IsFpuRegister());
    FpuRegister dst = dst_loc.AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, offset, null_checker);
  }

  // Memory barriers for reference gets were already emitted above,
  // together with the reference load itself.
  if (is_volatile && (type != Primitive::kPrimNot)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
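
// For a volatile primitive get, the path above reduces to a plain load
// followed by a load-any barrier, e.g. for an int field (a sketch;
// register names symbolic, and assuming GenerateMemoryBarrier lowers to
// SYNC 0 as elsewhere in this backend):
//
//   lw   dst, offset(obj)   # may double as the implicit null check
//   sync 0                  # MemBarrierKind::kLoadAny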

void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
                                            const FieldInfo& field_info ATTRIBUTE_UNUSED) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  if (Primitive::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
    locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
  } else {
    locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
  }
}

void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      store_type = kStoreByte;
      break;
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
      store_type = kStoreHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      store_type = kStoreWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      store_type = kStoreDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (value_location.IsConstant()) {
    int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
    __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
  } else {
    if (!Primitive::IsFloatingPointType(type)) {
      DCHECK(value_location.IsRegister());
      GpuRegister src = value_location.AsRegister<GpuRegister>();
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, Primitive::kPrimNot);
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      DCHECK(value_location.IsFpuRegister());
      FpuRegister src = value_location.AsFpuRegister<FpuRegister>();
      __ StoreFpuToOffset(store_type, src, obj, offset, null_checker);
    }
  }

  if (needs_write_barrier) {
    DCHECK(value_location.IsRegister());
    GpuRegister src = value_location.AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
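
// A volatile reference set is thus bracketed by barriers with the card
// mark in between (a sketch; register names symbolic):
//
//   sync 0                   # kAnyStore: order earlier accesses before the store
//   sw   src, offset(obj)    # the store (heap-poisoned first when enabled)
//   ...MarkGCCard(obj, src)  # skipped when `src` is statically null
//   sync 0                   # kAnyAny: order the store before later accesses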

void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Move(maybe_temp.AsRegister<GpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}

void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  GpuRegister obj_reg = obj.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
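
// Contrast of the two helpers above: the one-register variant reuses `out`
// as both base and destination, so its slow-path flavor must first save
// the original reference into `maybe_temp`; the two-register variant keeps
// `obj` live across the load and needs `maybe_temp` only for the Baker
// fast path.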

void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(
    HInstruction* instruction,
    Location root,
    GpuRegister obj,
    uint32_t offset,
    ReadBarrierOption read_barrier_option) {
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      //
      //   root = obj.field;
      //   temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      //   if (temp != null) {
      //     root = temp(root)
      //   }

      // /* GcRoot<mirror::Object> */ root = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
      static_assert(
          sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
          "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
          "have different sizes.");
      static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                    "art::mirror::CompressedReference<mirror::Object> and int32_t "
                    "have different sizes.");

      // Slow path marking the GC root `root`.
      Location temp = Location::RegisterLocation(T9);
      SlowPathCodeMIPS64* slow_path =
          new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS64(
              instruction,
              root,
              /*entrypoint*/ temp);
      codegen_->AddSlowPath(slow_path);

      // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
      const int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(root.reg() - 1);
      // Loading the entrypoint does not require a load acquire since it is only changed when
      // threads are suspended or running a checkpoint.
      __ LoadFromOffset(kLoadDoubleword, temp.AsRegister<GpuRegister>(), TR, entry_point_offset);
      // The entrypoint is null when the GC is not marking, which saves one load
      // compared to checking GetIsGcMarking.
      __ Bnezc(temp.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
    } else {
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Daddiu64(root_reg, obj, static_cast<int32_t>(offset));
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
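
// Under Baker read barriers every GC root load above therefore takes the
// shape (a sketch; registers as in the code):
//
//   lwu   root, offset(obj)                 # the root itself
//   ld    t9, <pReadBarrierMarkRegNN>(tr)   # per-register mark entrypoint
//   bnezc t9, slow_path                     # non-null only while marking
//
// Pinning `temp` to T9 lets the slow path invoke the already-loaded
// entrypoint directly instead of reloading it.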

void CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t offset,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}

void CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  ScaleFactor scale_factor = TIMES_4;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}

void CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    GpuRegister obj,
                                                                    uint32_t offset,
                                                                    Location index,
                                                                    ScaleFactor scale_factor,
                                                                    Location temp,
                                                                    bool needs_null_check,
                                                                    bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = LockWord(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
  GpuRegister temp_reg = temp.AsRegister<GpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, computed_offset);
    } else {
      GpuRegister index_reg = index.AsRegister<GpuRegister>();
      if (scale_factor == TIMES_1) {
        __ Daddu(TMP, index_reg, obj);
      } else {
        __ Dlsa(TMP, index_reg, obj, scale_factor);
      }
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS64* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register. Thus `offset` and `scale_factor`
    // above are expected to be zero in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetGraph()->GetArena())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(instruction,
                                                    ref,
                                                    obj,
                                                    /* field_offset */ index,
                                                    temp_reg);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS64(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltzc(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
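
// Worked example of the gray-bit test at the end of the function: if the
// read barrier state sat at, say, bit 28 of the lock word (a hypothetical
// position for illustration), the emitted pair would be
//
//   sll   temp, temp, 3    # 31 - 28: move the rb_state bit to the sign bit
//   bltzc temp, slow_path  # taken iff the object is gray
//
// i.e. one shift and one compact branch, with no mask or compare needed.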

void CodeGeneratorMIPS64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathMIPS64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  __ Bc(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void CodeGeneratorMIPS64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
                                                       Location out,
                                                       Location ref,
                                                       Location obj,
                                                       uint32_t offset,
                                                       Location index) {
  if (kEmitCompilerReadBarrier) {
    // Baker's read barriers shall be handled by the fast path
    // (CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier).
    DCHECK(!kUseBakerReadBarrier);
    // If heap poisoning is enabled, unpoisoning will be taken care of
    // by the runtime within the slow path.
    GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
  } else if (kPoisonHeapReferences) {
    __ UnpoisonHeapReference(out.AsRegister<GpuRegister>());
  }
}

void CodeGeneratorMIPS64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                         Location out,
                                                         Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeMIPS64* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS64(instruction, out, root);
  AddSlowPath(slow_path);

  __ Bc(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // The output does overlap inputs.
  // Note that TypeCheckSlowPathMIPS64 uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}

void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  Mips64Label done;
  SlowPathCodeMIPS64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Classes must be equal for the instanceof to succeed.
      __ Xor(out, out, cls);
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ Bnec(out, cls, &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop, success;
      __ Bind(&loop);
      __ Beqc(out, cls, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ Bnezc(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      Mips64Label success;
      __ Beqc(out, cls, &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Sltiu(out, out, 1);
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
                                                                       /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bnec(out, cls, slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require assigning fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS64(instruction,
                                                                       /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
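
// The kExactCheck case above computes its boolean without a branch:
//
//   xor   out, out, cls   # 0 iff obj->klass_ == cls
//   sltiu out, out, 1     # out = (out == 0) ? 1 : 0
//
// Two ALU instructions replace a compare-and-branch diamond; the same
// sltiu idiom reappears in kArrayObjectCheck to test for kPrimNot.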

void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as the dex calling convention,
  // except that instead of loading arg0/r0 with the target Method*, arg0/r0 will
  // contain the method_idx.
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}

void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
  InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
  CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
}

void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // The register T0 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
}

void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
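
// Interface dispatch above is four dependent loads ahead of the call
// (a sketch; `temp` and T9 as in the code):
//
//   lwu  temp, class_offset(receiver)            # receiver->klass_
//   ld   temp, ImtPtrOffset(temp)                # klass_->imt_
//   ld   temp, OffsetOfElement(imt_index)(temp)  # ArtMethod* or conflict stub
//   ld   t9, entry_point(temp)                   # quick entrypoint
//   jalr t9 ; nop
//
// The dex method index preloaded into T0 is consumed only when the IMT
// slot holds art_quick_imt_conflict_trampoline, which uses it to locate
// the actual target.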

void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    return;
  }

  HandleInvoke(invoke);
}

void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  codegen_->GenerateInvokePolymorphicCall(invoke);
}

static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
  if (invoke->GetLocations()->Intrinsified()) {
    IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
    intrinsic.Dispatch(invoke);
    return true;
  }
  return false;
}

HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  bool fallback_load = false;
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
      break;
    case HLoadString::LoadKind::kBssEntry:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kDexCacheViaMethod:
      break;
    case HLoadString::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
  }
  if (fallback_load) {
    desired_string_load_kind = HLoadString::LoadKind::kDexCacheViaMethod;
  }
  return desired_string_load_kind;
}

HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  bool fallback_load = false;
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(!GetCompilerOptions().GetCompilePic());
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
      DCHECK(GetCompilerOptions().GetCompilePic());
      break;
    case HLoadClass::LoadKind::kBootImageAddress:
      break;
    case HLoadClass::LoadKind::kBssEntry:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kDexCacheViaMethod:
      break;
  }
  if (fallback_load) {
    desired_class_load_kind = HLoadClass::LoadKind::kDexCacheViaMethod;
  }
  return desired_class_load_kind;
}

HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On MIPS64 we support all dispatch types.
  return desired_dispatch_info;
}

void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadDoubleword,
                     DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
      uint32_t offset = invoke->GetDexCacheArrayOffset();
      CodeGeneratorMIPS64::PcRelativePatchInfo* info =
          NewPcRelativeDexCacheArrayPatch(invoke->GetDexFileForPcRelativeDexCache(), offset);
      EmitPcRelativeAddressPlaceholderHigh(info, AT);
      __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
      Location current_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      GpuRegister reg = temp.AsRegister<GpuRegister>();
      GpuRegister method_reg;
      if (current_method.IsRegister()) {
        method_reg = current_method.AsRegister<GpuRegister>();
      } else {
        // TODO: use the appropriate DCHECK() here if possible.
        // DCHECK(invoke->GetLocations()->Intrinsified());
        DCHECK(!current_method.IsValid());
        method_reg = reg;
        __ Ld(reg, SP, kCurrentMethodStackOffset);
      }

      // temp = temp->dex_cache_resolved_methods_;
      __ LoadFromOffset(kLoadDoubleword,
                        reg,
                        method_reg,
                        ArtMethod::DexCacheResolvedMethodsOffset(kMips64PointerSize).Int32Value());
      // temp = temp[index_in_cache];
      // Note: Don't use invoke->GetTargetMethod() as it may point to a different dex file.
      uint32_t index_in_cache = invoke->GetDexMethodIndex();
      __ LoadFromOffset(kLoadDoubleword,
                        reg,
                        reg,
                        CodeGenerator::GetCachePointerOffset(index_in_cache));
      break;
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      __ Balc(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9()
      __ Jalr(T9);
      __ Nop();
      break;
  }
  DCHECK(!IsLeafMethod());
}
5021
5022void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005023 // Explicit clinit checks triggered by static invokes must have been pruned by
5024 // art::PrepareForRegisterAllocation.
5025 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005026
5027 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
5028 return;
5029 }
5030
5031 LocationSummary* locations = invoke->GetLocations();
5032 codegen_->GenerateStaticOrDirectCall(invoke,
5033 locations->HasTemps()
5034 ? locations->GetTemp(0)
5035 : Location::NoLocation());
5036 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
5037}
5038
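// Emits a virtual call through the receiver's vtable. In outline (a sketch,
// not the exact instruction sequence):
//
//   temp = receiver->klass_;                  // also the implicit null check
//   temp = temp->embedded_vtable_[invoke->GetVTableIndex()];
//   T9 = temp->entry_point_from_quick_compiled_code_;
//   jalr T9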
void CodeGeneratorMIPS64::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp_location) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  GpuRegister receiver = calling_convention.GetRegisterAt(0);

  GpuRegister temp = temp_location.AsRegister<GpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // temp = object->GetClass();
  __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (though a
  // future concurrent copying collector may not).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();
}

void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  if (TryGenerateIntrinsicCode(invoke, codegen_)) {
    return;
  }

  codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

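// As with the other instructions in this file, HLoadClass is handled in two
// passes: the LocationsBuilder below records the register constraints and
// call kind for each HLoadClass::LoadKind, and the InstructionCodeGenerator
// further down emits the corresponding code.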
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barrier we have a temp-clobbering call.
    }
  }
}

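// Loads a class reference according to cls->GetLoadKind(). Roughly, the kinds
// map to:
//   kReferrersClass          - load current_method->declaring_class_
//   kBootImageLinkTime*      - link-time patched literal or PC-relative add
//   kBootImageAddress        - 32-bit literal pointing into the boot image
//   kBssEntry                - PC-relative load from a .bss slot, with a slow
//                              path that resolves the class on a miss
//   kJitTableAddress         - literal pointing into the JIT class table
// (This is a summary only; the switch below is authoritative.)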
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  GpuRegister current_method_reg = ZERO;
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
    current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
  }

  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
                                                               cls->GetTypeIndex()));
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      CodeGeneratorMIPS64::PcRelativePatchInfo* info =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info, out);
      GenerateGcRootFieldLoad(cls, out_loc, out, /* placeholder */ 0x5678, read_barrier_option);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress:
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
                                                          cls->GetTypeIndex(),
                                                          cls->GetClass()));
      GenerateGcRootFieldLoad(cls, out_loc, out, 0, read_barrier_option);
      break;
    case HLoadClass::LoadKind::kDexCacheViaMethod:
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Beqzc(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

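// The pending exception lives in a field of the current Thread, which is
// reachable through the TR register on MIPS64: VisitLoadException reads that
// field and VisitClearException stores zero (null) back into it.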
static int32_t GetExceptionTlsOffset() {
  return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
}

void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
  GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
  __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
}

void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}

void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}

void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barrier we have a temp-clobbering call.
      }
    }
  }
}

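// String loads mirror the class loads above: boot-image strings come from
// deduplicated literals or PC-relative patches, kBssEntry strings are read
// from a .bss slot backed by a pResolveString slow path, and JIT-compiled
// code loads a literal pointing into the JIT string table.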
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();

  switch (load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimeAddress:
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
                                                                 load->GetStringIndex()));
      return;  // No dex cache slow path.
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      return;  // No dex cache slow path.
    }
    case HLoadString::LoadKind::kBssEntry: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info, out);
      GenerateGcRootFieldLoad(load,
                              out_loc,
                              out,
                              /* placeholder */ 0x5678,
                              kCompilerReadBarrierOption);
      SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathMIPS64(load);
      codegen_->AddSlowPath(slow_path);
      __ Beqzc(out, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress:
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
                                                           load->GetStringIndex(),
                                                           load->GetString()));
      GenerateGcRootFieldLoad(load, out_loc, out, 0, kCompilerReadBarrierOption);
      return;
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  DCHECK(load_kind == HLoadString::LoadKind::kDexCacheViaMethod);
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
  __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}

void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}

void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
                          instruction,
                          instruction->GetDexPc());
  if (instruction->IsEnter()) {
    CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
  } else {
    CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
  }
}

void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
      if (type == Primitive::kPrimInt)
        __ MulR6(dst, lhs, rhs);
      else
        __ Dmul(dst, lhs, rhs);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (type == Primitive::kPrimFloat)
        __ MulS(dst, lhs, rhs);
      else
        __ MulD(dst, lhs, rhs);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected mul type " << type;
  }
}

void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
      if (type == Primitive::kPrimInt)
        __ Subu(dst, ZERO, src);
      else
        __ Dsubu(dst, ZERO, src);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
      if (type == Primitive::kPrimFloat)
        __ NegS(dst, src);
      else
        __ NegD(dst, src);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected neg type " << type;
  }
}

void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}

void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  codegen_->InvokeRuntime(kQuickAllocArrayResolved, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
}

void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

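// String allocation never goes through the usual allocator entrypoints:
// java.lang.String instances are created by StringFactory, so for
// IsStringAlloc() the code below calls the pNewEmptyString entrypoint
// through T9, just like any other quick-code call.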
void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
    MemberOffset code_offset =
        ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
    __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ Nop();
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}

void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
      __ Nor(dst, src, ZERO);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}

void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  __ Xori(locations->Out().AsRegister<GpuRegister>(),
          locations->InAt(0).AsRegister<GpuRegister>(),
          1);
}

void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}

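// An implicit null check is simply a load from the object into the ZERO
// register: a null object faults, and the runtime's fault handler converts
// the signal into a NullPointerException using the PC recorded by
// RecordPcInfo(). The explicit variant tests the register and branches to a
// slow path instead.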
void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
  RecordPcInfo(instruction, instruction->GetDexPc());
}

void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS64(instruction);
  AddSlowPath(slow_path);

  Location obj = instruction->GetLocations()->InAt(0);

  __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}

void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}

void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
                                                        ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      Primitive::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
                                           : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      GenerateDivRemIntegral(instruction);
      break;

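    // There is no MIPS instruction for floating-point remainder, and Java's
    // frem/drem have fmod semantics, so these cases call the fmodf/fmod
    // runtime entrypoints (note the CheckEntrypointTypes signatures below).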
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      QuickEntrypointEnum entrypoint = (type == Primitive::kPrimFloat) ? kQuickFmodf : kQuickFmod;
      codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
      if (type == Primitive::kPrimFloat) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void LocationsBuilderMIPS64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}

void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}

void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  Primitive::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, Mips64ReturnLocation(return_type));
}

void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}

void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
}

void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}

void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);

  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion);

  if (Primitive::IsFloatingPointType(input_type)) {
    locations->SetInAt(0, Location::RequiresFpuRegister());
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
  }

  if (Primitive::IsFloatingPointType(result_type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();

  DCHECK_NE(input_type, result_type);

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case Primitive::kPrimChar:
        __ Andi(dst, src, 0xFFFF);
        break;
      case Primitive::kPrimByte:
        if (input_type == Primitive::kPrimLong) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          __ Seb(dst, src);
        }
        break;
      case Primitive::kPrimShort:
        if (input_type == Primitive::kPrimLong) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          __ Seh(dst, src);
        }
        break;
      case Primitive::kPrimInt:
      case Primitive::kPrimLong:
        // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
        // conversions, except when the input and output registers are the same and we are not
        // converting longs to shorter types. In these cases, do nothing.
        if ((input_type == Primitive::kPrimLong) || (dst != src)) {
          __ Sll(dst, src, 0);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    if (input_type == Primitive::kPrimLong) {
      __ Dmtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      __ Mtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    Mips64Label truncate;
    Mips64Label done;

    // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
    // value when the input is either a NaN or is outside of the range of the output type
    // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
    // the same result.
    //
    // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
    // value of the output type if the input is outside of the range after the truncation or
    // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
    // results. This matches the desired float/double-to-int/long conversion exactly.
    //
    // So, NAN2008 affects handling of negative values and NaNs by the truncate instruction.
    //
    // The following code supports both NAN2008=0 and NAN2008=1 behaviors of the truncate
    // instruction, the reason being that the emulator implements NAN2008=0 on MIPS64R6,
    // even though it must be NAN2008=1 on R6.
    //
    // The code takes care of the different behaviors by first comparing the input to the
    // minimum output value (-2**63 for truncating to long, -2**31 for truncating to int).
    // If the input is greater than or equal to the minimum, it proceeds to the truncate
    // instruction, which will handle such an input the same way irrespective of NAN2008.
    // Otherwise the input is compared to itself to determine whether it is a NaN or not
    // in order to return either zero or the minimum value.
    //
    // TODO: simplify this when the emulator correctly implements NAN2008=1 behavior of the
    // truncate instruction for MIPS64R6.
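    //
    // The net effect should match the Java rules for f2i/f2l/d2i/d2l; for
    // example, (long) Float.NaN == 0L, (long) -1e30f == Long.MIN_VALUE and
    // (long) 1e30f == Long.MAX_VALUE.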
5996 if (input_type == Primitive::kPrimFloat) {
5997 uint32_t min_val = (result_type == Primitive::kPrimLong)
5998 ? bit_cast<uint32_t, float>(std::numeric_limits<int64_t>::min())
5999 : bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
6000 __ LoadConst32(TMP, min_val);
6001 __ Mtc1(TMP, FTMP);
6002 __ CmpLeS(FTMP, FTMP, src);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006003 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006004 uint64_t min_val = (result_type == Primitive::kPrimLong)
6005 ? bit_cast<uint64_t, double>(std::numeric_limits<int64_t>::min())
6006 : bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
6007 __ LoadConst64(TMP, min_val);
6008 __ Dmtc1(TMP, FTMP);
6009 __ CmpLeD(FTMP, FTMP, src);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006010 }
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006011
6012 __ Bc1nez(FTMP, &truncate);
6013
6014 if (input_type == Primitive::kPrimFloat) {
6015 __ CmpEqS(FTMP, src, src);
6016 } else {
6017 __ CmpEqD(FTMP, src, src);
6018 }
6019 if (result_type == Primitive::kPrimLong) {
6020 __ LoadConst64(dst, std::numeric_limits<int64_t>::min());
6021 } else {
6022 __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
6023 }
6024 __ Mfc1(TMP, FTMP);
6025 __ And(dst, dst, TMP);
6026
6027 __ Bc(&done);
6028
6029 __ Bind(&truncate);
6030
6031 if (result_type == Primitive::kPrimLong) {
Roland Levillain888d0672015-11-23 18:53:50 +00006032 if (input_type == Primitive::kPrimFloat) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006033 __ TruncLS(FTMP, src);
Roland Levillain888d0672015-11-23 18:53:50 +00006034 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006035 __ TruncLD(FTMP, src);
Roland Levillain888d0672015-11-23 18:53:50 +00006036 }
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006037 __ Dmfc1(dst, FTMP);
Roland Levillain888d0672015-11-23 18:53:50 +00006038 } else {
6039 if (input_type == Primitive::kPrimFloat) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006040 __ TruncWS(FTMP, src);
Roland Levillain888d0672015-11-23 18:53:50 +00006041 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006042 __ TruncWD(FTMP, src);
Roland Levillain888d0672015-11-23 18:53:50 +00006043 }
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006044 __ Mfc1(dst, FTMP);
Roland Levillain888d0672015-11-23 18:53:50 +00006045 }
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006046
6047 __ Bind(&done);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006048 } else if (Primitive::IsFloatingPointType(result_type) &&
6049 Primitive::IsFloatingPointType(input_type)) {
6050 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6051 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
6052 if (result_type == Primitive::kPrimFloat) {
6053 __ Cvtsd(dst, src);
6054 } else {
6055 __ Cvtds(dst, src);
6056 }
6057 } else {
6058 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
6059 << " to " << result_type;
6060 }
6061}

void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do; this is removed by the 'prepare for register allocation' pass.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do; this is removed by the 'prepare for register allocation' pass.
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

// Simple implementation of packed switch: generate cascaded compare/jumps.
void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}

void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
                                                                 int32_t lower_bound,
                                                                 uint32_t num_entries,
                                                                 HBasicBlock* switch_block,
                                                                 HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if the index is negative.
  // Note: we don't separately handle the case where the subtraction wraps and the
  // index comes out positive even though value < lower_bound; the index is then
  // necessarily >= num_entries, so the compares below still reach the default
  // block. This saves one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // Handle the last remaining case value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
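
// Illustration (constructed for this comment, not taken from the source): for
// lower_bound == 3 and num_entries == 5 the cascade above emits, in effect:
//   addiu32 TMP, value, -3
//   bltzc   TMP, default     // value < 3
//   beqzc   TMP, case0       // value == 3
//   addiu   TMP, TMP, -2
//   bltzc   TMP, case1       // value == 4
//   beqzc   TMP, case2       // value == 5
//   addiu   TMP, TMP, -2
//   bltzc   TMP, case3       // value == 6
//   beqzc   TMP, case4       // value == 7
//   bc      default          // omitted when default is the fall-through block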

void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<Mips64Label*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range? The unsigned compare also catches value < lower_bound:
  // the difference then wraps to a large unsigned number above num_entries.
  __ Addiu32(TMP, value_reg, -lower_bound);
  __ LoadConst32(AT, num_entries);
  __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));

  // The value is in range: load the target address from the jump table,
  // indexing by the adjusted value.
  __ LoadLabelAddress(AT, table->GetLabel());
  __ Dlsa(TMP, TMP, AT, 2);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Daddu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ Nop();
}
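
// The address arithmetic above, spelled out (index == value - lower_bound;
// an illustration, assuming 32-bit table entries as the Lw suggests):
//   entry_addr = table_base + (index << 2);   // Dlsa(TMP, TMP, AT, 2): scale by 4
//   offset     = *(int32_t*) entry_addr;      // Lw(TMP, TMP, 0)
//   target     = table_base + offset;         // Daddu(TMP, TMP, AT)
// Storing offsets rather than absolute addresses keeps each entry at 4 bytes
// and position-independent.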

void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  int32_t lower_bound = switch_instr->GetStartValue();
  uint32_t num_entries = switch_instr->GetNumEntries();
  LocationSummary* locations = switch_instr->GetLocations();
  GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
  HBasicBlock* switch_block = switch_instr->GetBlock();
  HBasicBlock* default_block = switch_instr->GetDefaultBlock();

  if (num_entries > kPackedSwitchJumpTableThreshold) {
    GenTableBasedPackedSwitch(value_reg,
                              lower_bound,
                              num_entries,
                              switch_block,
                              default_block);
  } else {
    GenPackedSwitchWithCompares(value_reg,
                                lower_bound,
                                num_entries,
                                switch_block,
                                default_block);
  }
}
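
// Heuristic note (a reading of the code above, not a measured claim): the
// compare cascade costs roughly three instructions per two cases and touches
// no memory, while the jump table has a fixed setup sequence plus a dependent
// load, so it only pays off once num_entries exceeds
// kPackedSwitchJumpTableThreshold, which is defined elsewhere in this backend.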

void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kMips64PointerSize).SizeValue();
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->InAt(0).AsRegister<GpuRegister>(),
                      method_offset);
  } else {
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kMips64PointerSize));
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->InAt(0).AsRegister<GpuRegister>(),
                      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->Out().AsRegister<GpuRegister>(),
                      method_offset);
  }
}
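
// Shape of the two paths above (an interpretation for reference): with
// cls = InAt(0) and dst = Out(),
//   vtable: dst = *(cls + embedded_vtable_entry_offset)           // one load
//   IMT:    dst = *(*(cls + imt_ptr_offset) + imt_entry_offset)   // two loads
// The vtable is embedded in the Class object itself, while the ImTable is a
// separate structure reached through a pointer, hence the extra indirection.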

}  // namespace mips64
}  // namespace art