blob: 2b6928eee2c1d85a487622d41874b3555760e2a6 [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunze4147fcc2017-06-17 19:57:27 -070019#include "arch/mips64/asm_support_mips64.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070020#include "art_method.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010021#include "class_table.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070022#include "code_generator_utils.h"
Alexey Frunze19f6c692016-11-30 19:19:55 -080023#include "compiled_method.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070024#include "entrypoints/quick/quick_entrypoints.h"
25#include "entrypoints/quick/quick_entrypoints_enum.h"
26#include "gc/accounting/card_table.h"
Vladimir Markoeebb8212018-06-05 14:57:24 +010027#include "gc/space/image_space.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070028#include "heap_poisoning.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070029#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070030#include "intrinsics_mips64.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010031#include "linker/linker_patch.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070032#include "mirror/array-inl.h"
33#include "mirror/class-inl.h"
34#include "offsets.h"
Vladimir Marko174b2e22017-10-12 13:34:49 +010035#include "stack_map_stream.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070036#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070037#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070038#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070039#include "utils/stack_checks.h"
40
41namespace art {
42namespace mips64 {
43
// Stack-frame offset at which the current ArtMethod* is stored (bottom of frame).
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* argument under the managed calling convention.
static constexpr GpuRegister kMethodRegisterArgument = A0;

// Flags controlling the use of thunks for Baker read barriers.
constexpr bool kBakerReadBarrierThunksEnableForFields = true;
constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
51
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010052Location Mips64ReturnLocation(DataType::Type return_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070053 switch (return_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010054 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +010055 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010056 case DataType::Type::kInt8:
57 case DataType::Type::kUint16:
58 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -080059 case DataType::Type::kUint32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010060 case DataType::Type::kInt32:
61 case DataType::Type::kReference:
Aart Bik66c158e2018-01-31 12:55:04 -080062 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010063 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070064 return Location::RegisterLocation(V0);
65
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010066 case DataType::Type::kFloat32:
67 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070068 return Location::FpuRegisterLocation(F0);
69
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010070 case DataType::Type::kVoid:
Alexey Frunze4dda3372015-06-01 18:31:49 -070071 return Location();
72 }
73 UNREACHABLE();
74}
75
// Return location for managed (dex) calls; delegates to the shared helper
// (V0 for integral/reference, F0 for floating point).
Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(DataType::Type type) const {
  return Mips64ReturnLocation(type);
}
79
// The callee receives its ArtMethod* in A0 (kMethodRegisterArgument).
Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
83
// Assigns the location of the next managed-call argument of type `type`.
// Note that GP and FP register indices advance in lockstep: consuming an FP
// argument register also burns the corresponding GP one, and vice versa
// (the MIPS64 convention allocates integer and FP argument registers in
// parallel). Once registers run out, arguments go on the stack.
Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(DataType::Type type) {
  Location next_location;
  if (type == DataType::Type::kVoid) {
    LOG(FATAL) << "Unexpected parameter type " << type;
  }

  if (DataType::IsFloatingPointType(type) &&
      (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
    // FP argument register available: take it, and also skip a GP register.
    next_location = Location::FpuRegisterLocation(
        calling_convention.GetFpuRegisterAt(float_index_++));
    gp_index_++;
  } else if (!DataType::IsFloatingPointType(type) &&
             (gp_index_ < calling_convention.GetNumberOfRegisters())) {
    // GP argument register available: take it, and also skip an FP register.
    next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
    float_index_++;
  } else {
    // Out of argument registers: place the argument on the stack
    // (double-width slot for 64-bit values).
    size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
    next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                : Location::StackSlot(stack_offset);
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += DataType::Is64BitType(type) ? 2 : 1;

  return next_location;
}
110
// Return location for runtime (quick entrypoint) calls; same mapping as
// managed calls on MIPS64.
Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type type) {
  return Mips64ReturnLocation(type);
}
114
Vladimir Marko3232dbb2018-07-25 15:42:46 +0100115static RegisterSet OneRegInReferenceOutSaveEverythingCallerSaves() {
116 InvokeRuntimeCallingConvention calling_convention;
117 RegisterSet caller_saves = RegisterSet::Empty();
118 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
119 // The reference is returned in the same register. This differs from the standard return location.
120 return caller_saves;
121}
122
// Shorthand used throughout this file to emit instructions via the codegen's assembler.
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700126
127class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
128 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000129 explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700130
131 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100132 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700133 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
134 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000135 if (instruction_->CanThrowIntoCatchBlock()) {
136 // Live registers will be restored in the catch block if caught.
137 SaveLiveRegisters(codegen, instruction_->GetLocations());
138 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700139 // We're moving two locations to locations that could overlap, so we need a parallel
140 // move resolver.
141 InvokeRuntimeCallingConvention calling_convention;
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100142 codegen->EmitParallelMoves(locations->InAt(0),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700143 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100144 DataType::Type::kInt32,
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100145 locations->InAt(1),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700146 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100147 DataType::Type::kInt32);
Serban Constantinescufc734082016-07-19 17:18:07 +0100148 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
149 ? kQuickThrowStringBounds
150 : kQuickThrowArrayBounds;
151 mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100152 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700153 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
154 }
155
Alexandre Rames8158f282015-08-07 10:26:17 +0100156 bool IsFatal() const OVERRIDE { return true; }
157
Roland Levillain46648892015-06-19 16:07:18 +0100158 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }
159
Alexey Frunze4dda3372015-06-01 18:31:49 -0700160 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700161 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
162};
163
// Slow path for HDivZeroCheck: calls the ThrowDivZero entrypoint to raise
// ArithmeticException. Never returns.
class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  // The entrypoint throws; control never falls through back to compiled code.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
};
183
// Slow path shared by HLoadClass and HClinitCheck: resolves the type via
// the ResolveType entrypoint when needed, then optionally runs static
// initialization via InitializeStaticStorage, and finally moves the
// resulting class into the instruction's output location.
class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  LoadClassSlowPathMIPS64(HLoadClass* cls, HInstruction* at)
      : SlowPathCodeMIPS64(at), cls_(cls) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    const uint32_t dex_pc = instruction_->GetDexPc();
    // Whether we must call into the runtime to resolve the type, and/or to
    // run the class's static initializer.
    bool must_resolve_type = instruction_->IsLoadClass() && cls_->MustResolveTypeOnSlowPath();
    bool must_do_clinit = instruction_->IsClinitCheck() || cls_->MustGenerateClinitCheck();

    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    if (must_resolve_type) {
      DCHECK(IsSameDexFile(cls_->GetDexFile(), mips64_codegen->GetGraph()->GetDexFile()));
      dex::TypeIndex type_index = cls_->GetTypeIndex();
      // Pass the type index to the ResolveType entrypoint in the first
      // runtime argument register.
      __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
      mips64_codegen->InvokeRuntime(kQuickResolveType, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickResolveType, void*, uint32_t>();
      // If we also must_do_clinit, the resolved type is now in the correct register.
    } else {
      DCHECK(must_do_clinit);
      // The class is already resolved; move it into the first argument
      // register for the InitializeStaticStorage call below.
      Location source = instruction_->IsLoadClass() ? out : locations->InAt(0);
      mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   source,
                                   cls_->GetType());
    }
    if (must_do_clinit) {
      mips64_codegen->InvokeRuntime(kQuickInitializeStaticStorage, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, mirror::Class*>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      mips64_codegen->MoveLocation(out,
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   type);
    }
    RestoreLiveRegisters(codegen, locations);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};
244
// Slow path for HLoadString with the kBssEntry load kind: calls the
// ResolveString entrypoint with the string index and moves the resolved
// string into the output location.
class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit LoadStringSlowPathMIPS64(HLoadString* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    DCHECK(instruction_->IsLoadString());
    // This slow path is only used for the BSS-entry load kind.
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the string index to the entrypoint in the first argument register.
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // The resolved string comes back in the first argument register; move it
    // to the instruction's output.
    DataType::Type type = instruction_->GetType();
    mips64_codegen->MoveLocation(locations->Out(),
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    RestoreLiveRegisters(codegen, locations);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
};
282
// Slow path for HNullCheck: calls the ThrowNullPointer entrypoint to raise
// NullPointerException. Never returns.
class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  // The entrypoint throws; control never falls through back to compiled code.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
};
308
// Slow path for HSuspendCheck: calls the TestSuspend entrypoint so the
// thread can be suspended (e.g. for GC), then branches either back to the
// return label (loop-header checks) or to the successor block.
class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live vector registers for SIMD.
    mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live vector registers for SIMD.
    if (successor_ == nullptr) {
      // Resume right after the suspend-check site.
      __ Bc(GetReturnLabel());
    } else {
      // Continue with the designated successor block.
      __ Bc(mips64_codegen->GetLabelOf(successor_));
    }
  }

  Mips64Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Mips64Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
};
349
// Slow path shared by HInstanceOf and HCheckCast: moves the object and the
// class into the runtime argument registers and calls either
// InstanceofNonTrivial (producing a result) or CheckInstanceOf (which
// throws on failure). When `is_fatal_` is true the path never returns.
class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    // Registers only need saving when we can come back (non-fatal) or when
    // the exception may be caught within this method.
    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      // Move the boolean result into the instruction's output location.
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ Bc(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // Whether this type check cannot return to compiled code (the entrypoint throws).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};
404
// Slow path for HDeoptimize: passes the deoptimization kind to the
// Deoptimize entrypoint, which transfers execution to the interpreter.
class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    // Pass the deoptimization kind in the first argument register.
    __ LoadConst32(calling_convention.GetRegisterAt(0),
                   static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
};
427
// Slow path for HArraySet of object references that need a runtime type
// check: moves array, index and value into the three runtime argument
// registers via a parallel move (the sources may overlap the destinations)
// and calls the AputObject entrypoint.
class ArraySetSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit ArraySetSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    // arg0: the array.
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    // arg1: the index.
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    // arg2: the value being stored.
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS64);
};
468
469// Slow path marking an object reference `ref` during a read
470// barrier. The field `obj.field` in the object `obj` holding this
471// reference does not get updated by this slow path after marking (see
472// ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 below for that).
473//
474// This means that after the execution of this slow path, `ref` will
475// always be up-to-date, but `obj.field` may not; i.e., after the
476// flip, `ref` will be a to-space reference, but `obj.field` will
477// probably still be a from-space reference (unless it gets updated by
478// another thread, or if another thread installed another object
479// reference (different from `ref`) in `obj.field`).
480//
481// If `entrypoint` is a valid location it is assumed to already be
482// holding the entrypoint. The case where the entrypoint is passed in
483// is for the GcRoot read barrier.
484class ReadBarrierMarkSlowPathMIPS64 : public SlowPathCodeMIPS64 {
485 public:
486 ReadBarrierMarkSlowPathMIPS64(HInstruction* instruction,
487 Location ref,
488 Location entrypoint = Location::NoLocation())
489 : SlowPathCodeMIPS64(instruction), ref_(ref), entrypoint_(entrypoint) {
490 DCHECK(kEmitCompilerReadBarrier);
491 }
492
493 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }
494
495 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
496 LocationSummary* locations = instruction_->GetLocations();
497 GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
498 DCHECK(locations->CanCall());
499 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
500 DCHECK(instruction_->IsInstanceFieldGet() ||
501 instruction_->IsStaticFieldGet() ||
502 instruction_->IsArrayGet() ||
503 instruction_->IsArraySet() ||
504 instruction_->IsLoadClass() ||
505 instruction_->IsLoadString() ||
506 instruction_->IsInstanceOf() ||
507 instruction_->IsCheckCast() ||
508 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
509 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
510 << "Unexpected instruction in read barrier marking slow path: "
511 << instruction_->DebugName();
512
513 __ Bind(GetEntryLabel());
514 // No need to save live registers; it's taken care of by the
515 // entrypoint. Also, there is no need to update the stack mask,
516 // as this runtime call will not trigger a garbage collection.
517 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
518 DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
519 (S2 <= ref_reg && ref_reg <= S7) ||
520 (ref_reg == S8)) << ref_reg;
521 // "Compact" slow path, saving two moves.
522 //
523 // Instead of using the standard runtime calling convention (input
524 // and output in A0 and V0 respectively):
525 //
526 // A0 <- ref
527 // V0 <- ReadBarrierMark(A0)
528 // ref <- V0
529 //
530 // we just use rX (the register containing `ref`) as input and output
531 // of a dedicated entrypoint:
532 //
533 // rX <- ReadBarrierMarkRegX(rX)
534 //
535 if (entrypoint_.IsValid()) {
536 mips64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
537 DCHECK_EQ(entrypoint_.AsRegister<GpuRegister>(), T9);
538 __ Jalr(entrypoint_.AsRegister<GpuRegister>());
539 __ Nop();
540 } else {
541 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100542 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800543 // This runtime call does not require a stack map.
544 mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
545 instruction_,
546 this);
547 }
548 __ Bc(GetExitLabel());
549 }
550
551 private:
552 // The location (register) of the marked object reference.
553 const Location ref_;
554
555 // The location of the entrypoint if already loaded.
556 const Location entrypoint_;
557
558 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS64);
559};
560
561// Slow path marking an object reference `ref` during a read barrier,
562// and if needed, atomically updating the field `obj.field` in the
563// object `obj` holding this reference after marking (contrary to
564// ReadBarrierMarkSlowPathMIPS64 above, which never tries to update
565// `obj.field`).
566//
567// This means that after the execution of this slow path, both `ref`
568// and `obj.field` will be up-to-date; i.e., after the flip, both will
569// hold the same to-space reference (unless another thread installed
570// another object reference (different from `ref`) in `obj.field`).
571class ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 : public SlowPathCodeMIPS64 {
572 public:
573 ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(HInstruction* instruction,
574 Location ref,
575 GpuRegister obj,
576 Location field_offset,
577 GpuRegister temp1)
578 : SlowPathCodeMIPS64(instruction),
579 ref_(ref),
580 obj_(obj),
581 field_offset_(field_offset),
582 temp1_(temp1) {
583 DCHECK(kEmitCompilerReadBarrier);
584 }
585
586 const char* GetDescription() const OVERRIDE {
587 return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS64";
588 }
589
590 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
591 LocationSummary* locations = instruction_->GetLocations();
592 GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
593 DCHECK(locations->CanCall());
594 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
595 // This slow path is only used by the UnsafeCASObject intrinsic.
596 DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
597 << "Unexpected instruction in read barrier marking and field updating slow path: "
598 << instruction_->DebugName();
599 DCHECK(instruction_->GetLocations()->Intrinsified());
600 DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
601 DCHECK(field_offset_.IsRegister()) << field_offset_;
602
603 __ Bind(GetEntryLabel());
604
605 // Save the old reference.
606 // Note that we cannot use AT or TMP to save the old reference, as those
607 // are used by the code that follows, but we need the old reference after
608 // the call to the ReadBarrierMarkRegX entry point.
609 DCHECK_NE(temp1_, AT);
610 DCHECK_NE(temp1_, TMP);
611 __ Move(temp1_, ref_reg);
612
613 // No need to save live registers; it's taken care of by the
614 // entrypoint. Also, there is no need to update the stack mask,
615 // as this runtime call will not trigger a garbage collection.
616 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
617 DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
618 (S2 <= ref_reg && ref_reg <= S7) ||
619 (ref_reg == S8)) << ref_reg;
620 // "Compact" slow path, saving two moves.
621 //
622 // Instead of using the standard runtime calling convention (input
623 // and output in A0 and V0 respectively):
624 //
625 // A0 <- ref
626 // V0 <- ReadBarrierMark(A0)
627 // ref <- V0
628 //
629 // we just use rX (the register containing `ref`) as input and output
630 // of a dedicated entrypoint:
631 //
632 // rX <- ReadBarrierMarkRegX(rX)
633 //
634 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100635 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800636 // This runtime call does not require a stack map.
637 mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
638 instruction_,
639 this);
640
641 // If the new reference is different from the old reference,
642 // update the field in the holder (`*(obj_ + field_offset_)`).
643 //
644 // Note that this field could also hold a different object, if
645 // another thread had concurrently changed it. In that case, the
646 // the compare-and-set (CAS) loop below would abort, leaving the
647 // field as-is.
648 Mips64Label done;
649 __ Beqc(temp1_, ref_reg, &done);
650
651 // Update the the holder's field atomically. This may fail if
652 // mutator updates before us, but it's OK. This is achieved
653 // using a strong compare-and-set (CAS) operation with relaxed
654 // memory synchronization ordering, where the expected value is
655 // the old reference and the desired value is the new reference.
656
657 // Convenience aliases.
658 GpuRegister base = obj_;
659 GpuRegister offset = field_offset_.AsRegister<GpuRegister>();
660 GpuRegister expected = temp1_;
661 GpuRegister value = ref_reg;
662 GpuRegister tmp_ptr = TMP; // Pointer to actual memory.
663 GpuRegister tmp = AT; // Value in memory.
664
665 __ Daddu(tmp_ptr, base, offset);
666
667 if (kPoisonHeapReferences) {
668 __ PoisonHeapReference(expected);
669 // Do not poison `value` if it is the same register as
670 // `expected`, which has just been poisoned.
671 if (value != expected) {
672 __ PoisonHeapReference(value);
673 }
674 }
675
676 // do {
677 // tmp = [r_ptr] - expected;
678 // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
679
680 Mips64Label loop_head, exit_loop;
681 __ Bind(&loop_head);
682 __ Ll(tmp, tmp_ptr);
683 // The LL instruction sign-extends the 32-bit value, but
684 // 32-bit references must be zero-extended. Zero-extend `tmp`.
685 __ Dext(tmp, tmp, 0, 32);
686 __ Bnec(tmp, expected, &exit_loop);
687 __ Move(tmp, value);
688 __ Sc(tmp, tmp_ptr);
689 __ Beqzc(tmp, &loop_head);
690 __ Bind(&exit_loop);
691
692 if (kPoisonHeapReferences) {
693 __ UnpoisonHeapReference(expected);
694 // Do not unpoison `value` if it is the same register as
695 // `expected`, which has just been unpoisoned.
696 if (value != expected) {
697 __ UnpoisonHeapReference(value);
698 }
699 }
700
701 __ Bind(&done);
702 __ Bc(GetExitLabel());
703 }
704
705 private:
706 // The location (register) of the marked object reference.
707 const Location ref_;
708 // The register containing the object holding the marked object reference field.
709 const GpuRegister obj_;
710 // The location of the offset of the marked reference field within `obj_`.
711 Location field_offset_;
712
713 const GpuRegister temp1_;
714
715 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS64);
716};
717
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `out` receives the marked reference; `ref` is the unmarked reference
  // that was loaded; `obj` is the holder object; the field/element address
  // is `obj + offset` (plus `index`, scaled for HArrayGet, when valid).
  ReadBarrierForHeapReferenceSlowPathMIPS64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCodeMIPS64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  // Marshals (ref, obj, offset-or-index) into the runtime calling
  // convention, calls the kQuickReadBarrierSlow entrypoint, and moves the
  // returned (marked) reference into `out_`.
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        GpuRegister index_reg = index_.AsRegister<GpuRegister>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips64::Mips64Assembler::Sll and
          // art::mips64::Mips64Assembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          GpuRegister free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      // No valid index: pass the constant field offset as the third argument.
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    // The marked reference comes back in the runtime's return location.
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathMIPS64";
  }

 private:
  // Returns a caller-save core register different from `ref_` and `obj_`
  // that is neither callee-save nor blocked; LOG(FATAL)s if none exists.
  GpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<GpuRegister>());
    size_t obj = static_cast<int>(obj_.AsRegister<GpuRegister>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<GpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // Destination of the marked reference.
  const Location out_;
  // The unmarked reference that was loaded.
  const Location ref_;
  // The object holding the reference.
  const Location obj_;
  // Byte offset of the field within the holder (0 when `index_` is used
  // as a field offset by the Unsafe intrinsics).
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS64);
};
903
// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `root` is the location of the GC root to process; the (marked) result
  // is written to `out`.
  ReadBarrierForRootSlowPathMIPS64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  // Passes `root_` to the kQuickReadBarrierForRootSlow entrypoint and
  // moves the returned reference into `out_`. Only used for HLoadClass
  // and HLoadString (DCHECKed below).
  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Move the root into the first runtime argument register.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 root_,
                                 DataType::Type::kReference);
    mips64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    // The processed root comes back in the runtime's return location.
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS64"; }

 private:
  // Destination of the processed root.
  const Location out_;
  // The location of the GC root.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS64);
};
949
CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator(),
                 compiler_options.GetInstructionSetFeatures()->AsMips64InstructionSetFeatures()),
      // Literal maps are keyed by value, so equal constants share one entry.
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      // Linker patch tables (boot image and .bss entries), all arena-allocated.
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      // JIT roots for strings/classes, deduplicated by reference value.
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
987
988#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100989// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
990#define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700991#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700992
993void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700994 // Ensure that we fix up branches.
995 __ FinalizeCode();
996
997 // Adjust native pc offsets in stack maps.
Vladimir Marko174b2e22017-10-12 13:34:49 +0100998 StackMapStream* stack_map_stream = GetStackMapStream();
999 for (size_t i = 0, num = stack_map_stream->GetNumberOfStackMaps(); i != num; ++i) {
David Srbeckyd02b23f2018-05-29 23:27:22 +01001000 uint32_t old_position = stack_map_stream->GetStackMapNativePcOffset(i);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001001 uint32_t new_position = __ GetAdjustedPosition(old_position);
1002 DCHECK_GE(new_position, old_position);
Vladimir Marko174b2e22017-10-12 13:34:49 +01001003 stack_map_stream->SetStackMapNativePcOffset(i, new_position);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07001004 }
1005
1006 // Adjust pc offsets for the disassembly information.
1007 if (disasm_info_ != nullptr) {
1008 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
1009 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
1010 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
1011 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
1012 it.second.start = __ GetAdjustedPosition(it.second.start);
1013 it.second.end = __ GetAdjustedPosition(it.second.end);
1014 }
1015 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
1016 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
1017 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
1018 }
1019 }
1020
Alexey Frunze4dda3372015-06-01 18:31:49 -07001021 CodeGenerator::Finalize(allocator);
1022}
1023
1024Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
1025 return codegen_->GetAssembler();
1026}
1027
1028void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001029 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001030 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
1031}
1032
1033void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001034 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001035 codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
1036}
1037
void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
  // Reload the scratch register spilled by SpillScratch() from the top of
  // the stack, then release the doubleword slot that held it.
  // Pop reg
  __ Ld(GpuRegister(reg), SP, 0);
  __ DecreaseFrameSize(kMips64DoublewordSize);
}
1043
void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
  // Reserve a doubleword slot on the stack and save `reg` there so it can
  // be used as a scratch register; RestoreScratch() undoes this.
  // Push reg
  __ IncreaseFrameSize(kMips64DoublewordSize);
  __ Sd(GpuRegister(reg), SP, 0);
}
1049
1050void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
1051 LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
1052 StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
1053 // Allocate a scratch register other than TMP, if available.
1054 // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
1055 // automatically unspilled when the scratch scope object is destroyed).
1056 ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
1057 // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
Lazar Trsicd9672662015-09-03 17:33:01 +02001058 int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001059 __ LoadFromOffset(load_type,
1060 GpuRegister(ensure_scratch.GetRegister()),
1061 SP,
1062 index1 + stack_offset);
1063 __ LoadFromOffset(load_type,
1064 TMP,
1065 SP,
1066 index2 + stack_offset);
1067 __ StoreToOffset(store_type,
1068 GpuRegister(ensure_scratch.GetRegister()),
1069 SP,
1070 index2 + stack_offset);
1071 __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
1072}
1073
void ParallelMoveResolverMIPS64::ExchangeQuadSlots(int index1, int index2) {
  // Swap two quadword stack slots: load both into the FPU temporaries
  // (FTMP, FTMP2), then store each back into the other's slot.
  __ LoadFpuFromOffset(kLoadQuadword, FTMP, SP, index1);
  __ LoadFpuFromOffset(kLoadQuadword, FTMP2, SP, index2);
  __ StoreFpuToOffset(kStoreQuadword, FTMP, SP, index2);
  __ StoreFpuToOffset(kStoreQuadword, FTMP2, SP, index1);
}
1080
Alexey Frunze4dda3372015-06-01 18:31:49 -07001081static dwarf::Reg DWARFReg(GpuRegister reg) {
1082 return dwarf::Reg::Mips64Core(static_cast<int>(reg));
1083}
1084
David Srbeckyba702002016-02-01 18:15:29 +00001085static dwarf::Reg DWARFReg(FpuRegister reg) {
1086 return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
1087}
Alexey Frunze4dda3372015-06-01 18:31:49 -07001088
void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  // Optionally bump the method's 16-bit hotness counter (load-halfword,
  // increment, store-halfword on the ArtMethod in the argument register).
  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    __ Lhu(TMP, kMethodRegisterArgument, ArtMethod::HotnessCountOffset().Int32Value());
    __ Addiu(TMP, TMP, 1);
    __ Sh(TMP, kMethodRegisterArgument, ArtMethod::HotnessCountOffset().Int32Value());
  }

  bool do_overflow_check =
      FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kMips64) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe below SP by the reserved amount (load into ZERO, i.e. value
    // discarded) so a stack overflow faults here, where a stack map is
    // recorded for it.
    __ LoadFromOffset(
        kLoadWord,
        ZERO,
        SP,
        -static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(InstructionSet::kMips64)) {
    LOG(FATAL) << "Stack frame larger than "
        << GetStackOverflowReservedBytes(InstructionSet::kMips64) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Store allocated core callee-saves at the top of the new frame, walking
  // down from the frame size; CFI records each slot.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Then the allocated FPU callee-saves, continuing down from the same offset.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreFpuToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1155
void CodeGeneratorMIPS64::GenerateFrameExit() {
  // Snapshot the CFI state; it is restored after the epilogue so code
  // emitted after this point keeps the pre-epilogue unwind info.
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    // Offsets mirror GenerateFrameEntry: walk down from the frame size,
    // core registers first, then FPU registers.
    uint32_t ofs = GetFrameSize();
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFpuFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    __ DecreaseFrameSize(GetFrameSize());
  }

  // Return to the caller via the restored RA.
  __ Jic(RA, 0);

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1190
1191void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
1192 __ Bind(GetLabelOf(block));
1193}
1194
// Emits code moving a value from `source` to `destination`, each of which may
// be a GPR, an FPR, a (double/SIMD) stack slot or a constant. `dst_type`
// selects 32-bit vs 64-bit and integral vs floating-point operations; it must
// not be kVoid (see the DCHECK below — the type-inference paths are kept but
// currently unreachable).
void CodeGeneratorMIPS64::MoveLocation(Location destination,
                                       Location source,
                                       DataType::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  // A valid move can always be inferred from the destination and source
  // locations. When moving from and to a register, the argument type can be
  // used to generate 32bit instead of 64bit moves.
  bool unspecified_type = (dst_type == DataType::Type::kVoid);
  DCHECK_EQ(unspecified_type, false);

  if (destination.IsRegister() || destination.IsFpuRegister()) {
    if (unspecified_type) {
      HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
      if (source.IsStackSlot() ||
          (src_cst != nullptr && (src_cst->IsIntConstant()
                                  || src_cst->IsFloatConstant()
                                  || src_cst->IsNullConstant()))) {
        // For stack slots and 32bit constants, a 32bit type is appropriate.
        dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
      } else {
        // If the source is a double stack slot or a 64bit constant, a 64bit
        // type is appropriate. Else the source is a register, and since the
        // type has not been specified, we chose a 64bit type to force a 64bit
        // move.
        dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
      }
    }
    // The destination register class must agree with the (possibly inferred) type.
    DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
           (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
    if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
      // Move to GPR/FPR from stack
      LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
      if (DataType::IsFloatingPointType(dst_type)) {
        __ LoadFpuFromOffset(load_type,
                             destination.AsFpuRegister<FpuRegister>(),
                             SP,
                             source.GetStackIndex());
      } else {
        // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
        __ LoadFromOffset(load_type,
                          destination.AsRegister<GpuRegister>(),
                          SP,
                          source.GetStackIndex());
      }
    } else if (source.IsSIMDStackSlot()) {
      // SIMD values are always moved as full 128-bit quadwords.
      __ LoadFpuFromOffset(kLoadQuadword,
                           destination.AsFpuRegister<FpuRegister>(),
                           SP,
                           source.GetStackIndex());
    } else if (source.IsConstant()) {
      // Move to GPR/FPR from constant
      // For an FPR destination the constant is first materialized in AT
      // (or ZERO when the bit pattern is zero) and then transferred.
      GpuRegister gpr = AT;
      if (!DataType::IsFloatingPointType(dst_type)) {
        gpr = destination.AsRegister<GpuRegister>();
      }
      if (dst_type == DataType::Type::kInt32 || dst_type == DataType::Type::kFloat32) {
        int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
        if (DataType::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst32(gpr, value);
        }
      } else {
        int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
        if (DataType::IsFloatingPointType(dst_type) && value == 0) {
          gpr = ZERO;
        } else {
          __ LoadConst64(gpr, value);
        }
      }
      if (dst_type == DataType::Type::kFloat32) {
        __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      } else if (dst_type == DataType::Type::kFloat64) {
        __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
      }
    } else if (source.IsRegister()) {
      if (destination.IsRegister()) {
        // Move to GPR from GPR
        __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
      } else {
        DCHECK(destination.IsFpuRegister());
        if (DataType::Is64BitType(dst_type)) {
          __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        } else {
          __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
        }
      }
    } else if (source.IsFpuRegister()) {
      if (destination.IsFpuRegister()) {
        if (GetGraph()->HasSIMD()) {
          // With SIMD the full vector register must be preserved.
          __ MoveV(VectorRegisterFrom(destination),
                   VectorRegisterFrom(source));
        } else {
          // Move to FPR from FPR
          if (dst_type == DataType::Type::kFloat32) {
            __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
          } else {
            DCHECK_EQ(dst_type, DataType::Type::kFloat64);
            __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
          }
        }
      } else {
        DCHECK(destination.IsRegister());
        if (DataType::Is64BitType(dst_type)) {
          __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        } else {
          __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
        }
      }
    }
  } else if (destination.IsSIMDStackSlot()) {
    if (source.IsFpuRegister()) {
      __ StoreFpuToOffset(kStoreQuadword,
                          source.AsFpuRegister<FpuRegister>(),
                          SP,
                          destination.GetStackIndex());
    } else {
      DCHECK(source.IsSIMDStackSlot());
      // Stack-to-stack quadword move goes through the FPU scratch register.
      __ LoadFpuFromOffset(kLoadQuadword,
                           FTMP,
                           SP,
                           source.GetStackIndex());
      __ StoreFpuToOffset(kStoreQuadword,
                          FTMP,
                          SP,
                          destination.GetStackIndex());
    }
  } else {  // The destination is not a register. It must be a stack slot.
    DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
    if (source.IsRegister() || source.IsFpuRegister()) {
      if (unspecified_type) {
        if (source.IsRegister()) {
          dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
        } else {
          dst_type =
              destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
        }
      }
      DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
             (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
      // Move to stack from GPR/FPR
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      if (source.IsRegister()) {
        __ StoreToOffset(store_type,
                         source.AsRegister<GpuRegister>(),
                         SP,
                         destination.GetStackIndex());
      } else {
        __ StoreFpuToOffset(store_type,
                            source.AsFpuRegister<FpuRegister>(),
                            SP,
                            destination.GetStackIndex());
      }
    } else if (source.IsConstant()) {
      // Move to stack from constant
      // Zero is stored directly from the ZERO register; other values are
      // first materialized in TMP.
      HConstant* src_cst = source.GetConstant();
      StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
      GpuRegister gpr = ZERO;
      if (destination.IsStackSlot()) {
        int32_t value = GetInt32ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst32(gpr, value);
        }
      } else {
        DCHECK(destination.IsDoubleStackSlot());
        int64_t value = GetInt64ValueOf(src_cst->AsConstant());
        if (value != 0) {
          gpr = TMP;
          __ LoadConst64(gpr, value);
        }
      }
      __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
      DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
      // Move to stack from stack
      if (destination.IsStackSlot()) {
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
      } else {
        __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
      }
    }
  }
}
1385
// Emits code swapping the values held in `loc1` and `loc2` (registers and/or
// stack slots) using TMP/FTMP as scratch. Constants cannot be swapped.
void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, DataType::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_simd1 = loc1.IsSIMDStackSlot();
  bool is_simd2 = loc2.IsSIMDStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs
    if (GetGraph()->HasSIMD()) {
      // With SIMD, swap the entire 128-bit vector registers via FTMP.
      __ MoveV(static_cast<VectorRegister>(FTMP), VectorRegisterFrom(loc1));
      __ MoveV(VectorRegisterFrom(loc1), VectorRegisterFrom(loc2));
      __ MoveV(VectorRegisterFrom(loc2), static_cast<VectorRegister>(FTMP));
    } else {
      FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
      FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
      if (type == DataType::Type::kFloat32) {
        __ MovS(FTMP, r1);
        __ MovS(r1, r2);
        __ MovS(r2, FTMP);
      } else {
        DCHECK_EQ(type, DataType::Type::kFloat64);
        __ MovD(FTMP, r1);
        __ MovD(r1, r2);
        __ MovD(r2, FTMP);
      }
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
    // Stash the memory operand in TMP, store the register over it, then move
    // TMP into the register (via mtc1/dmtc1 for FPU destinations).
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    // Stack-to-stack swaps are delegated to the parallel move resolver.
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else if (is_simd1 && is_simd2) {
    move_resolver_.ExchangeQuadSlots(loc1.GetStackIndex(), loc2.GetStackIndex());
  } else if ((is_fp_reg1 && is_simd2) || (is_fp_reg2 && is_simd1)) {
    // Swap an FPU register with a SIMD stack slot, 128 bits at a time.
    Location fp_reg_loc = is_fp_reg1 ? loc1 : loc2;
    Location mem_loc = is_fp_reg1 ? loc2 : loc1;
    __ LoadFpuFromOffset(kLoadQuadword, FTMP, SP, mem_loc.GetStackIndex());
    __ StoreFpuToOffset(kStoreQuadword,
                        fp_reg_loc.AsFpuRegister<FpuRegister>(),
                        SP,
                        mem_loc.GetStackIndex());
    __ MoveV(VectorRegisterFrom(fp_reg_loc), static_cast<VectorRegister>(FTMP));
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}
1470
Calin Juravle175dc732015-08-25 15:42:32 +01001471void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
1472 DCHECK(location.IsRegister());
1473 __ LoadConst32(location.AsRegister<GpuRegister>(), value);
1474}
1475
Calin Juravlee460d1d2015-09-29 04:52:17 +01001476void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1477 if (location.IsRegister()) {
1478 locations->AddTemp(location);
1479 } else {
1480 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1481 }
1482}
1483
// Marks the card-table card covering `object` as dirty, recording that the
// object may now hold a reference to `value`. When `value_can_be_null` is
// true, the marking is skipped for a null `value` (a null store creates no
// cross-reference the GC needs to scan).
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;   // Holds the thread's card table pointer.
  GpuRegister temp = TMP;  // Scratch for the card address computation.
  if (value_can_be_null) {
    __ Beqzc(value, &done);
  }
  // Load the card table address from the current thread (TR).
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  // Index into the table: one card per 2^kCardShift bytes of heap.
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  // Store the low byte of the card table pointer itself as the dirty marker;
  // this avoids materializing a separate constant (relies on the card table
  // base encoding the dirty value in its LSB — see gc::accounting::CardTable).
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1504
// Converts every recorded PcRelativePatchInfo in `infos` into a linker patch
// produced by `Factory` and appends it to `linker_patches`. For a low-half
// patch the PC-relative anchor is taken from its paired high-half info.
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile* dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.label.IsBound());
    uint32_t literal_offset = __ GetLabelLocation(&info.label);
    // A high-half info anchors to itself; a low-half info anchors to the
    // instruction its high-half label marks (the auipc).
    const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
    uint32_t pc_rel_offset = __ GetLabelLocation(&info_high.label);
    linker_patches->push_back(Factory(literal_offset, dex_file, pc_rel_offset, offset_or_index));
  }
}
1519
// Adapts a linker-patch factory that takes no dex file (e.g.
// IntrinsicReferencePatch, DataBimgRelRoPatch) to the four-argument shape
// required by EmitPcRelativeLinkerPatches.
template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
                                     const DexFile* target_dex_file,
                                     uint32_t pc_insn_offset,
                                     uint32_t boot_image_offset) {
  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
}
1528
// Flushes all recorded PC-relative patch placeholders into `linker_patches`.
// The patch kind chosen for each table depends on the compilation mode:
// boot-image compilation emits direct boot-image patches, while AOT app
// compilation reuses boot_image_method_patches_ for .data.bimg.rel.ro entries
// and must have no other boot-image patches recorded.
void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size() +
      boot_image_intrinsic_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
        boot_image_intrinsic_patches_, linker_patches);
  } else {
    // App AOT: boot image references go through .data.bimg.rel.ro entries.
    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_method_patches_, linker_patches);
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
    DCHECK(boot_image_intrinsic_patches_.empty());
  }
  // .bss entry patches are emitted the same way in both modes.
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1564
Vladimir Marko6fd16062018-06-26 11:02:04 +01001565CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageIntrinsicPatch(
1566 uint32_t intrinsic_data,
1567 const PcRelativePatchInfo* info_high) {
1568 return NewPcRelativePatch(
1569 /* dex_file */ nullptr, intrinsic_data, info_high, &boot_image_intrinsic_patches_);
1570}
1571
Vladimir Markob066d432018-01-03 13:14:37 +00001572CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageRelRoPatch(
1573 uint32_t boot_image_offset,
1574 const PcRelativePatchInfo* info_high) {
1575 return NewPcRelativePatch(
1576 /* dex_file */ nullptr, boot_image_offset, info_high, &boot_image_method_patches_);
1577}
1578
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001579CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001580 MethodReference target_method,
1581 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001582 return NewPcRelativePatch(
1583 target_method.dex_file, target_method.index, info_high, &boot_image_method_patches_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001584}
1585
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001586CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001587 MethodReference target_method,
1588 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001589 return NewPcRelativePatch(
1590 target_method.dex_file, target_method.index, info_high, &method_bss_entry_patches_);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001591}
1592
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001593CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001594 const DexFile& dex_file,
1595 dex::TypeIndex type_index,
1596 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001597 return NewPcRelativePatch(&dex_file, type_index.index_, info_high, &boot_image_type_patches_);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001598}
1599
Vladimir Marko1998cd02017-01-13 13:02:58 +00001600CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001601 const DexFile& dex_file,
1602 dex::TypeIndex type_index,
1603 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001604 return NewPcRelativePatch(&dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001605}
1606
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001607CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001608 const DexFile& dex_file,
1609 dex::StringIndex string_index,
1610 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001611 return NewPcRelativePatch(
1612 &dex_file, string_index.index_, info_high, &boot_image_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001613}
1614
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001615CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewStringBssEntryPatch(
1616 const DexFile& dex_file,
1617 dex::StringIndex string_index,
1618 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001619 return NewPcRelativePatch(&dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001620}
1621
Alexey Frunze19f6c692016-11-30 19:19:55 -08001622CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001623 const DexFile* dex_file,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001624 uint32_t offset_or_index,
1625 const PcRelativePatchInfo* info_high,
1626 ArenaDeque<PcRelativePatchInfo>* patches) {
1627 patches->emplace_back(dex_file, offset_or_index, info_high);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001628 return &patches->back();
1629}
1630
Alexey Frunzef63f5692016-12-13 17:43:11 -08001631Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1632 return map->GetOrCreate(
1633 value,
1634 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1635}
1636
Alexey Frunze19f6c692016-11-30 19:19:55 -08001637Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
1638 return uint64_literals_.GetOrCreate(
1639 value,
1640 [this, value]() { return __ NewLiteral<uint64_t>(value); });
1641}
1642
Alexey Frunzef63f5692016-12-13 17:43:11 -08001643Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001644 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001645}
1646
// Emits the high half (auipc) of a patchable PC-relative address sequence,
// binding `info_high`'s label to it. If `info_low` is provided, its label is
// bound to the position immediately after, where the caller must emit the
// instruction consuming the low half of the offset.
void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                               GpuRegister out,
                                                               PcRelativePatchInfo* info_low) {
  DCHECK(!info_high->patch_info_high);  // `info_high` must itself be a high-half record.
  __ Bind(&info_high->label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
  if (info_low != nullptr) {
    DCHECK_EQ(info_low->patch_info_high, info_high);
    __ Bind(&info_low->label);
  }
}
1661
// Loads the address of the boot image object identified by
// `boot_image_reference` into `reg`. The emitted code depends on the
// compilation mode:
//  - boot image compilation: PC-relative address, fixed up at link time;
//  - AOT app compilation: 32-bit load through a .data.bimg.rel.ro entry;
//  - JIT: the boot image is already mapped, so embed its absolute address.
void CodeGeneratorMIPS64::LoadBootImageAddress(GpuRegister reg, uint32_t boot_image_reference) {
  if (GetCompilerOptions().IsBootImage()) {
    PcRelativePatchInfo* info_high = NewBootImageIntrinsicPatch(boot_image_reference);
    PcRelativePatchInfo* info_low = NewBootImageIntrinsicPatch(boot_image_reference, info_high);
    EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
    __ Daddiu(reg, AT, /* placeholder */ 0x5678);
  } else if (Runtime::Current()->IsAotCompiler()) {
    PcRelativePatchInfo* info_high = NewBootImageRelRoPatch(boot_image_reference);
    PcRelativePatchInfo* info_low = NewBootImageRelRoPatch(boot_image_reference, info_high);
    EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
    // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
    __ Lwu(reg, AT, /* placeholder */ 0x5678);
  } else {
    DCHECK(Runtime::Current()->UseJitCompilation());
    gc::Heap* heap = Runtime::Current()->GetHeap();
    DCHECK(!heap->GetBootImageSpaces().empty());
    uintptr_t address =
        reinterpret_cast<uintptr_t>(heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference);
    __ LoadLiteral(reg, kLoadDoubleword, DeduplicateBootImageAddressLiteral(address));
  }
}
1683
Vladimir Marko6fd16062018-06-26 11:02:04 +01001684void CodeGeneratorMIPS64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
1685 uint32_t boot_image_offset) {
1686 DCHECK(invoke->IsStatic());
1687 InvokeRuntimeCallingConvention calling_convention;
1688 GpuRegister argument = calling_convention.GetRegisterAt(0);
1689 if (GetCompilerOptions().IsBootImage()) {
1690 DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
1691 // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
1692 MethodReference target_method = invoke->GetTargetMethod();
1693 dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
1694 PcRelativePatchInfo* info_high = NewBootImageTypePatch(*target_method.dex_file, type_idx);
1695 PcRelativePatchInfo* info_low =
1696 NewBootImageTypePatch(*target_method.dex_file, type_idx, info_high);
1697 EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
1698 __ Daddiu(argument, AT, /* placeholder */ 0x5678);
1699 } else {
1700 LoadBootImageAddress(argument, boot_image_offset);
1701 }
1702 InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
1703 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
1704}
1705
Alexey Frunze627c1a02017-01-30 19:28:14 -08001706Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
1707 dex::StringIndex string_index,
1708 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001709 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001710 return jit_string_patches_.GetOrCreate(
1711 StringReference(&dex_file, string_index),
1712 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
1713}
1714
1715Literal* CodeGeneratorMIPS64::DeduplicateJitClassLiteral(const DexFile& dex_file,
1716 dex::TypeIndex type_index,
1717 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001718 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001719 return jit_class_patches_.GetOrCreate(
1720 TypeReference(&dex_file, type_index),
1721 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
1722}
1723
1724void CodeGeneratorMIPS64::PatchJitRootUse(uint8_t* code,
1725 const uint8_t* roots_data,
1726 const Literal* literal,
1727 uint64_t index_in_table) const {
1728 uint32_t literal_offset = GetAssembler().GetLabelLocation(literal->GetLabel());
1729 uintptr_t address =
1730 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
1731 reinterpret_cast<uint32_t*>(code + literal_offset)[0] = dchecked_integral_cast<uint32_t>(address);
1732}
1733
1734void CodeGeneratorMIPS64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1735 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001736 const StringReference& string_reference = entry.first;
1737 Literal* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01001738 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001739 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001740 }
1741 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001742 const TypeReference& type_reference = entry.first;
1743 Literal* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01001744 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001745 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001746 }
1747}
1748
// Marks the registers the register allocator must never hand out: ABI- and
// runtime-reserved core registers, the codegen scratch registers, and (for
// debuggable graphs) the FPU callee-saves.
void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
  // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
  blocked_core_registers_[ZERO] = true;
  blocked_core_registers_[K0] = true;
  blocked_core_registers_[K1] = true;
  blocked_core_registers_[GP] = true;
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[RA] = true;

  // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
  // registers (similar to how AT is used by MIPS assemblers).
  blocked_core_registers_[AT] = true;
  blocked_core_registers_[TMP] = true;
  blocked_core_registers_[TMP2] = true;
  blocked_fpu_registers_[FTMP] = true;

  if (GetInstructionSetFeatures().HasMsa()) {
    // To be used just for MSA instructions.
    blocked_fpu_registers_[FTMP2] = true;
  }

  // Reserve suspend and thread registers.
  blocked_core_registers_[S0] = true;
  blocked_core_registers_[TR] = true;

  // Reserve T9 for function calls
  blocked_core_registers_[T9] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}
1786
size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  // Spill one 64-bit GPR to the stack slot at `stack_index`; the return value
  // tells the caller how many bytes of stack were consumed.
  __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}
1791
size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  // Reload one 64-bit GPR from the stack slot at `stack_index`; returns the
  // number of bytes read (must mirror SaveCoreRegister).
  __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}
1796
size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  // With SIMD (MSA) in the graph the FPU registers hold 128-bit vectors, so a
  // quadword store is needed; otherwise a doubleword store suffices. The spill
  // slot size reported below matches the chosen store width.
  __ StoreFpuToOffset(GetGraph()->HasSIMD() ? kStoreQuadword : kStoreDoubleword,
                      FpuRegister(reg_id),
                      SP,
                      stack_index);
  return GetFloatingPointSpillSlotSize();
}
1804
size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  // Counterpart of SaveFloatingPointRegister: reload a quadword when the graph
  // uses SIMD (128-bit MSA vectors), a doubleword otherwise.
  __ LoadFpuFromOffset(GetGraph()->HasSIMD() ? kLoadQuadword : kLoadDoubleword,
                       FpuRegister(reg_id),
                       SP,
                       stack_index);
  return GetFloatingPointSpillSlotSize();
}
1812
void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
  // Print the symbolic GPR name for diagnostics (uses the enum's operator<<).
  stream << GpuRegister(reg);
}
1816
void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  // Print the symbolic FPU register name for diagnostics.
  stream << FpuRegister(reg);
}
1820
const Mips64InstructionSetFeatures& CodeGeneratorMIPS64::GetInstructionSetFeatures() const {
  // The compiler options for this code generator always carry MIPS64 features,
  // so the downcast below cannot fail.
  return *GetCompilerOptions().GetInstructionSetFeatures()->AsMips64InstructionSetFeatures();
}
1824
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  // Debug-mode sanity checks that this entrypoint may be called from here.
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  // Emit the actual call through the per-thread entrypoint table.
  GenerateInvokeRuntime(GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  // Entrypoints that can walk the stack need a stack map recorded at this PC.
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1835
void CodeGeneratorMIPS64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  // Variant of InvokeRuntime for entrypoints that never need a stack map;
  // validation asserts that recording PC info would indeed be unnecessary.
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1842
void CodeGeneratorMIPS64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  // Load the entrypoint address from the Thread register (TR) and call it.
  // T9 is used as the call register, per the MIPS calling convention.
  __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
  __ Jalr(T9);
  __ Nop();  // Fill the branch delay slot.
}
1848
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  // The class status lives in the bits of the status word above the
  // SubtypeCheck bitstring; load just the byte that contains it.
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);

  __ LoadFromOffset(kLoadUnsignedByte, TMP, class_reg, status_byte_offset);
  // TMP = 1 iff status < kInitialized, i.e. the class still needs initialization.
  __ Sltiu(TMP, TMP, shifted_initialized_value);
  __ Bnezc(TMP, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1864
void InstructionCodeGeneratorMIPS64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                                                       GpuRegister temp)  {
  // On entry `temp` holds the class to test; on exit `temp` is zero iff the
  // class's SubtypeCheck bitstring (under `mask`) equals `path_to_root`.
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Load only the bitstring part of the status word.
    __ LoadFromOffset(
        kLoadUnsignedHalfword, temp, temp, mirror::Class::StatusOffset().Int32Value());
    // Compare the bitstring bits using XOR.
    __ Xori(temp, temp, dchecked_integral_cast<uint16_t>(path_to_root));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ LoadFromOffset(kLoadWord, temp, temp, mirror::Class::StatusOffset().Int32Value());
    // Compare the bitstring bits using XOR.
    if (IsUint<16>(path_to_root)) {
      // Path fits in the 16-bit zero-extended immediate of Xori.
      __ Xori(temp, temp, dchecked_integral_cast<uint16_t>(path_to_root));
    } else {
      __ LoadConst32(TMP, path_to_root);
      __ Xor(temp, temp, TMP);
    }
    // Shift out bits that do not contribute to the comparison.
    __ Sll(temp, temp, 32 - mask_bits);
  }
}
1892
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  // SYNC with stype 0 is emitted for every barrier kind, hence `kind` is unused.
  __ Sync(0);  // only stype 0 is supported
}
1896
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  // Reuse the slow path already attached to this HSuspendCheck if one exists
  // (the same check may be reached along multiple edges).
  SuspendCheckSlowPathMIPS64* slow_path =
      down_cast<SuspendCheckSlowPathMIPS64*>(instruction->GetSlowPath());

  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathMIPS64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      // A non-null successor means this is a back-edge suspend check.
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Test the thread's flags word; non-zero means a suspend request is pending.
  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    // Fall-through form: branch out to the slow path, then continue here.
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge form: skip the slow path when no suspend is requested.
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1927
// Caches the codegen's assembler so the emission helpers can use the `__` macro.
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1933
// Register allocation for And/Or/Xor/Add/Sub. Decides whether the right-hand
// side may stay a constant (encodable as an immediate by the codegen half of
// HandleBinaryOp) or must be materialized in a register.
void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DCHECK_EQ(instruction->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  DataType::Type type = instruction->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      HInstruction* right = instruction->InputAt(1);
      bool can_use_imm = false;
      if (right->IsConstant()) {
        int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
        if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
          // Andi/Ori/Xori zero-extend their 16-bit immediate.
          can_use_imm = IsUint<16>(imm);
        } else {
          DCHECK(instruction->IsAdd() || instruction->IsSub());
          bool single_use = right->GetUses().HasExactlyOneElement();
          if (instruction->IsSub()) {
            // Sub with an immediate is emitted as Add of the negation; the
            // 32-bit INT32_MIN case is excluded because -INT32_MIN overflows.
            if (!(type == DataType::Type::kInt32 && imm == INT32_MIN)) {
              imm = -imm;
            }
          }
          // Immediates that fit one instruction (Addiu/Aui/Daddiu/Daui) are
          // always profitable; larger ones only when the constant has a
          // single use (no point keeping it in a register).
          if (type == DataType::Type::kInt32) {
            can_use_imm = IsInt<16>(imm) || (Low16Bits(imm) == 0) || single_use;
          } else {
            can_use_imm = IsInt<16>(imm) || (IsInt<32>(imm) && (Low16Bits(imm) == 0)) || single_use;
          }
        }
      }
      if (can_use_imm)
        locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
      else
        locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      }
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
  }
}
1982
// Code generation for And/Or/Xor/Add/Sub, mirroring the location constraints
// chosen by LocationsBuilderMIPS64::HandleBinaryOp. Integer immediates are
// materialized with the shortest Addiu/Aui/Daddiu/Daui/Dahi/Dati sequence.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd() || instruction->IsSub()) {
        // Sub with an immediate is emitted as Add of the negated immediate.
        if (instruction->IsSub()) {
          rhs_imm = -rhs_imm;
        }
        if (type == DataType::Type::kInt32) {
          if (use_imm) {
            if (IsInt<16>(rhs_imm)) {
              __ Addiu(dst, lhs, rhs_imm);
            } else {
              // Split into Aui (upper 16 bits) + Addiu (lower 16 bits).
              // Addiu sign-extends its immediate, so bump the upper half
              // when the lower half is negative.
              int16_t rhs_imm_high = High16Bits(rhs_imm);
              int16_t rhs_imm_low = Low16Bits(rhs_imm);
              if (rhs_imm_low < 0) {
                rhs_imm_high += 1;
              }
              __ Aui(dst, lhs, rhs_imm_high);
              if (rhs_imm_low != 0) {
                __ Addiu(dst, dst, rhs_imm_low);
              }
            }
          } else {
            if (instruction->IsAdd()) {
              __ Addu(dst, lhs, rhs_reg);
            } else {
              DCHECK(instruction->IsSub());
              __ Subu(dst, lhs, rhs_reg);
            }
          }
        } else {
          if (use_imm) {
            if (IsInt<16>(rhs_imm)) {
              __ Daddiu(dst, lhs, rhs_imm);
            } else if (IsInt<32>(rhs_imm)) {
              // Daui + Daddiu, with sign-extension carry compensation; a
              // carry out of bit 31 (rhs_imm_high wrapping to -32768) is
              // fixed up with Dahi(dst, 1).
              int16_t rhs_imm_high = High16Bits(rhs_imm);
              int16_t rhs_imm_low = Low16Bits(rhs_imm);
              bool overflow_hi16 = false;
              if (rhs_imm_low < 0) {
                rhs_imm_high += 1;
                overflow_hi16 = (rhs_imm_high == -32768);
              }
              __ Daui(dst, lhs, rhs_imm_high);
              if (rhs_imm_low != 0) {
                __ Daddiu(dst, dst, rhs_imm_low);
              }
              if (overflow_hi16) {
                __ Dahi(dst, 1);
              }
            } else {
              // Full 64-bit immediate: up to four instructions
              // (Daddiu + Daui + Dahi + Dati), one per 16-bit chunk.
              // Each sign-extending add propagates a borrow into the next
              // chunk, compensated by pre-adding 1 to that chunk here.
              int16_t rhs_imm_low = Low16Bits(Low32Bits(rhs_imm));
              if (rhs_imm_low < 0) {
                rhs_imm += (INT64_C(1) << 16);
              }
              int16_t rhs_imm_upper = High16Bits(Low32Bits(rhs_imm));
              if (rhs_imm_upper < 0) {
                rhs_imm += (INT64_C(1) << 32);
              }
              int16_t rhs_imm_high = Low16Bits(High32Bits(rhs_imm));
              if (rhs_imm_high < 0) {
                rhs_imm += (INT64_C(1) << 48);
              }
              int16_t rhs_imm_top = High16Bits(High32Bits(rhs_imm));
              GpuRegister tmp = lhs;
              if (rhs_imm_low != 0) {
                __ Daddiu(dst, tmp, rhs_imm_low);
                tmp = dst;
              }
              // Dahi and Dati must use the same input and output register, so we have to initialize
              // the dst register using Daddiu or Daui, even when the intermediate value is zero:
              // Daui(dst, lhs, 0).
              if ((rhs_imm_upper != 0) || (rhs_imm_low == 0)) {
                __ Daui(dst, tmp, rhs_imm_upper);
              }
              if (rhs_imm_high != 0) {
                __ Dahi(dst, rhs_imm_high);
              }
              if (rhs_imm_top != 0) {
                __ Dati(dst, rhs_imm_top);
              }
            }
          } else if (instruction->IsAdd()) {
            __ Daddu(dst, lhs, rhs_reg);
          } else {
            DCHECK(instruction->IsSub());
            __ Dsubu(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == DataType::Type::kFloat32)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == DataType::Type::kFloat32)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
2130
// Register allocation for Shl/Shr/UShr/Ror. The shift distance may always be
// a constant (it is masked to the type width by the codegen half).
void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
  DataType::Type type = instr->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift type " << type;
  }
}
2148
// Code generation for Shl/Shr/UShr/Ror. Constant shift distances are masked
// to the operand width (Java shift semantics); 64-bit constant shifts of 32
// or more use the dedicated *32 instruction forms.
void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  DataType::Type type = instr->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        // Mask the distance to 0..31 (int) or 0..63 (long).
        uint32_t shift_value = rhs_imm &
            (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          // Shift by zero is a plain move (elided when dst == lhs).
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            // The *32 forms encode distances 32..63 as (distance - 32).
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        // Variable distance: the *v instruction forms take the distance in a
        // register (the hardware masks it to the operand width).
        if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2241
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  // Add shares its location logic with the other binary operations.
  HandleBinaryOp(instruction);
}
2245
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  // Add shares its code generation with the other binary operations.
  HandleBinaryOp(instruction);
}
2249
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  // And shares its location logic with the other binary operations.
  HandleBinaryOp(instruction);
}
2253
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  // And shares its code generation with the other binary operations.
  HandleBinaryOp(instruction);
}
2257
// Register allocation for array loads. Object-array loads under a non-Baker
// read barrier go through a slow path; Baker read barriers may additionally
// need a temp (unless the read-barrier thunks make it unnecessary).
void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
  DataType::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    // A constant index is handled like a field load, so the field/array
    // thunk flags decide whether the temp is actually required.
    bool temp_needed = instruction->GetIndex()->IsConstant()
        ? !kBakerReadBarrierThunksEnableForFields
        : !kBakerReadBarrierThunksEnableForArrays;
    if (temp_needed) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
2294
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002295static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS64* codegen) {
2296 auto null_checker = [codegen, instruction]() {
2297 codegen->MaybeRecordImplicitNullCheck(instruction);
2298 };
2299 return null_checker;
2300}
2301
Alexey Frunze4dda3372015-06-01 18:31:49 -07002302void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
2303 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002304 Location obj_loc = locations->InAt(0);
2305 GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
2306 Location out_loc = locations->Out();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002307 Location index = locations->InAt(1);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002308 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002309 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002310
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002311 DataType::Type type = instruction->GetType();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002312 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2313 instruction->IsStringCharAt();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002314 switch (type) {
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002315 case DataType::Type::kBool:
2316 case DataType::Type::kUint8: {
Alexey Frunze15958152017-02-09 19:08:30 -08002317 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002318 if (index.IsConstant()) {
2319 size_t offset =
2320 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002321 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002322 } else {
2323 __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002324 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002325 }
2326 break;
2327 }
2328
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002329 case DataType::Type::kInt8: {
Alexey Frunze15958152017-02-09 19:08:30 -08002330 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002331 if (index.IsConstant()) {
2332 size_t offset =
2333 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002334 __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002335 } else {
2336 __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002337 __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002338 }
2339 break;
2340 }
2341
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002342 case DataType::Type::kUint16: {
Alexey Frunze15958152017-02-09 19:08:30 -08002343 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002344 if (maybe_compressed_char_at) {
2345 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002346 __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002347 __ Dext(TMP, TMP, 0, 1);
2348 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2349 "Expecting 0=compressed, 1=uncompressed");
2350 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002351 if (index.IsConstant()) {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002352 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
2353 if (maybe_compressed_char_at) {
2354 Mips64Label uncompressed_load, done;
2355 __ Bnezc(TMP, &uncompressed_load);
2356 __ LoadFromOffset(kLoadUnsignedByte,
2357 out,
2358 obj,
2359 data_offset + (const_index << TIMES_1));
2360 __ Bc(&done);
2361 __ Bind(&uncompressed_load);
2362 __ LoadFromOffset(kLoadUnsignedHalfword,
2363 out,
2364 obj,
2365 data_offset + (const_index << TIMES_2));
2366 __ Bind(&done);
2367 } else {
2368 __ LoadFromOffset(kLoadUnsignedHalfword,
2369 out,
2370 obj,
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002371 data_offset + (const_index << TIMES_2),
2372 null_checker);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002373 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002374 } else {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002375 GpuRegister index_reg = index.AsRegister<GpuRegister>();
2376 if (maybe_compressed_char_at) {
2377 Mips64Label uncompressed_load, done;
2378 __ Bnezc(TMP, &uncompressed_load);
2379 __ Daddu(TMP, obj, index_reg);
2380 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
2381 __ Bc(&done);
2382 __ Bind(&uncompressed_load);
Chris Larsencd0295d2017-03-31 15:26:54 -07002383 __ Dlsa(TMP, index_reg, obj, TIMES_2);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002384 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
2385 __ Bind(&done);
2386 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002387 __ Dlsa(TMP, index_reg, obj, TIMES_2);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002388 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002389 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002390 }
2391 break;
2392 }
2393
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01002394 case DataType::Type::kInt16: {
2395 GpuRegister out = out_loc.AsRegister<GpuRegister>();
2396 if (index.IsConstant()) {
2397 size_t offset =
2398 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2399 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
2400 } else {
2401 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_2);
2402 __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
2403 }
2404 break;
2405 }
2406
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002407 case DataType::Type::kInt32: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002408 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze15958152017-02-09 19:08:30 -08002409 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002410 LoadOperandType load_type =
2411 (type == DataType::Type::kReference) ? kLoadUnsignedWord : kLoadWord;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002412 if (index.IsConstant()) {
2413 size_t offset =
2414 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002415 __ LoadFromOffset(load_type, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002416 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002417 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002418 __ LoadFromOffset(load_type, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002419 }
2420 break;
2421 }
2422
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002423 case DataType::Type::kReference: {
Alexey Frunze15958152017-02-09 19:08:30 -08002424 static_assert(
2425 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2426 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2427 // /* HeapReference<Object> */ out =
2428 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
2429 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002430 bool temp_needed = index.IsConstant()
2431 ? !kBakerReadBarrierThunksEnableForFields
2432 : !kBakerReadBarrierThunksEnableForArrays;
2433 Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze15958152017-02-09 19:08:30 -08002434 // Note that a potential implicit null check is handled in this
2435 // CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier call.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002436 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
2437 if (index.IsConstant()) {
2438 // Array load with a constant index can be treated as a field load.
2439 size_t offset =
2440 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2441 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2442 out_loc,
2443 obj,
2444 offset,
2445 temp,
2446 /* needs_null_check */ false);
2447 } else {
2448 codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
2449 out_loc,
2450 obj,
2451 data_offset,
2452 index,
2453 temp,
2454 /* needs_null_check */ false);
2455 }
Alexey Frunze15958152017-02-09 19:08:30 -08002456 } else {
2457 GpuRegister out = out_loc.AsRegister<GpuRegister>();
2458 if (index.IsConstant()) {
2459 size_t offset =
2460 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2461 __ LoadFromOffset(kLoadUnsignedWord, out, obj, offset, null_checker);
2462 // If read barriers are enabled, emit read barriers other than
2463 // Baker's using a slow path (and also unpoison the loaded
2464 // reference, if heap poisoning is enabled).
2465 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
2466 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002467 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Alexey Frunze15958152017-02-09 19:08:30 -08002468 __ LoadFromOffset(kLoadUnsignedWord, out, TMP, data_offset, null_checker);
2469 // If read barriers are enabled, emit read barriers other than
2470 // Baker's using a slow path (and also unpoison the loaded
2471 // reference, if heap poisoning is enabled).
2472 codegen_->MaybeGenerateReadBarrierSlow(instruction,
2473 out_loc,
2474 out_loc,
2475 obj_loc,
2476 data_offset,
2477 index);
2478 }
2479 }
2480 break;
2481 }
2482
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002483 case DataType::Type::kInt64: {
Alexey Frunze15958152017-02-09 19:08:30 -08002484 GpuRegister out = out_loc.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002485 if (index.IsConstant()) {
2486 size_t offset =
2487 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002488 __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002489 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002490 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002491 __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002492 }
2493 break;
2494 }
2495
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002496 case DataType::Type::kFloat32: {
Alexey Frunze15958152017-02-09 19:08:30 -08002497 FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002498 if (index.IsConstant()) {
2499 size_t offset =
2500 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002501 __ LoadFpuFromOffset(kLoadWord, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002502 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002503 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002504 __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002505 }
2506 break;
2507 }
2508
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002509 case DataType::Type::kFloat64: {
Alexey Frunze15958152017-02-09 19:08:30 -08002510 FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002511 if (index.IsConstant()) {
2512 size_t offset =
2513 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002514 __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002515 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002516 __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002517 __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002518 }
2519 break;
2520 }
2521
Aart Bik66c158e2018-01-31 12:55:04 -08002522 case DataType::Type::kUint32:
2523 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002524 case DataType::Type::kVoid:
Alexey Frunze4dda3372015-06-01 18:31:49 -07002525 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2526 UNREACHABLE();
2527 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002528}
2529
2530void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002531 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002532 locations->SetInAt(0, Location::RequiresRegister());
2533 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2534}
2535
2536void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
2537 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01002538 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002539 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
2540 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2541 __ LoadFromOffset(kLoadWord, out, obj, offset);
2542 codegen_->MaybeRecordImplicitNullCheck(instruction);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002543 // Mask out compression flag from String's array length.
2544 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
2545 __ Srl(out, out, 1u);
2546 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002547}
2548
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002549Location LocationsBuilderMIPS64::RegisterOrZeroConstant(HInstruction* instruction) {
2550 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2551 ? Location::ConstantLocation(instruction->AsConstant())
2552 : Location::RequiresRegister();
2553}
2554
2555Location LocationsBuilderMIPS64::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2556 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2557 // We can store a non-zero float or double constant without first loading it into the FPU,
2558 // but we should only prefer this if the constant has a single use.
2559 if (instruction->IsConstant() &&
2560 (instruction->AsConstant()->IsZeroBitPattern() ||
2561 instruction->GetUses().HasExactlyOneElement())) {
2562 return Location::ConstantLocation(instruction->AsConstant());
2563 // Otherwise fall through and require an FPU register for the constant.
2564 }
2565 return Location::RequiresFpuRegister();
2566}
2567
Alexey Frunze4dda3372015-06-01 18:31:49 -07002568void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002569 DataType::Type value_type = instruction->GetComponentType();
Alexey Frunze15958152017-02-09 19:08:30 -08002570
2571 bool needs_write_barrier =
2572 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2573 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2574
Vladimir Markoca6fff82017-10-03 14:49:14 +01002575 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Alexey Frunze4dda3372015-06-01 18:31:49 -07002576 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002577 may_need_runtime_call_for_type_check ?
2578 LocationSummary::kCallOnSlowPath :
2579 LocationSummary::kNoCall);
2580
2581 locations->SetInAt(0, Location::RequiresRegister());
2582 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002583 if (DataType::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
Alexey Frunze15958152017-02-09 19:08:30 -08002584 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002585 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002586 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2587 }
2588 if (needs_write_barrier) {
2589 // Temporary register for the write barrier.
2590 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07002591 }
2592}
2593
// Emits the store of the value input into obj[index], dispatching on the
// array's component type. For reference stores this also emits the optional
// runtime type check (via a slow path), reference poisoning, and the GC card
// mark.
void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  // The null checker, when armed, lets the store itself serve as the null
  // check on `obj`.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  // With a constant index, the element address folds into the offset from
  // `obj`; with a register index, the element base is materialized into TMP.
  GpuRegister base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      // 8-bit elements: scale factor 1, so a plain add suffices for the
      // register-index case.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        __ Daddu(base_reg, obj, index.AsRegister<GpuRegister>());
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      // 16-bit elements: Dlsa computes obj + (index << 1).
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_2);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt32: {
      // 32-bit elements: Dlsa computes obj + (index << 2).
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kReference: {
      if (value_location.IsConstant()) {
        // Just setting null.
        // A constant reference value can only be null (DCHECKed below);
        // no type check, write barrier, or poisoning is needed.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      GpuRegister value = value_location.AsRegister<GpuRegister>();
      GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
      GpuRegister temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      Mips64Label done;
      SlowPathCodeMIPS64* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        // The inline type check compares value->klass_ against the array's
        // component type; any mismatch falls back to the runtime slow path.
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Null values always pass the type check: store and skip to `done`.
          Mips64Label non_zero;
          __ Bnezc(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ Bc(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers. This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // For Object[] arrays, a value whose superclass is Object also
          // passes without calling the runtime.
          Mips64Label do_put;
          __ Beqc(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnezc(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bnec(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      GpuRegister source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        // Without a type check, the store above is the first access to
        // `obj`; record it as the implicit null check.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case DataType::Type::kInt64: {
      // 64-bit elements: Dlsa computes obj + (index << 3).
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      // Constant floats are stored via their bit pattern from a core
      // register; otherwise store from the FPU register.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      // Same as kFloat32, with doubleword accesses and scale factor 8.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      // NOTE(review): this logs instruction->GetType() rather than the
      // switched-on value_type (the component type); consider logging
      // value_type instead — confirm intent.
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2834
2835void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002836 RegisterSet caller_saves = RegisterSet::Empty();
2837 InvokeRuntimeCallingConvention calling_convention;
2838 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2839 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2840 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicdbd43032017-11-15 16:31:56 +01002841
2842 HInstruction* index = instruction->InputAt(0);
2843 HInstruction* length = instruction->InputAt(1);
2844
2845 bool const_index = false;
2846 bool const_length = false;
2847
2848 if (index->IsConstant()) {
2849 if (length->IsConstant()) {
2850 const_index = true;
2851 const_length = true;
2852 } else {
2853 int32_t index_value = index->AsIntConstant()->GetValue();
2854 if (index_value < 0 || IsInt<16>(index_value + 1)) {
2855 const_index = true;
2856 }
2857 }
2858 } else if (length->IsConstant()) {
2859 int32_t length_value = length->AsIntConstant()->GetValue();
2860 if (IsUint<15>(length_value)) {
2861 const_length = true;
2862 }
2863 }
2864
2865 locations->SetInAt(0, const_index
2866 ? Location::ConstantLocation(index->AsConstant())
2867 : Location::RequiresRegister());
2868 locations->SetInAt(1, const_length
2869 ? Location::ConstantLocation(length->AsConstant())
2870 : Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002871}
2872
// Emits an unsigned "index < length" bounds check, branching to a slow path
// (which throws) on failure. The code shape depends on which of the two
// operands were kept as compile-time constants by the locations builder.
void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);

  if (length_loc.IsConstant()) {
    int32_t length = length_loc.GetConstant()->AsIntConstant()->GetValue();
    if (index_loc.IsConstant()) {
      // Both constant: the outcome is fully known at compile time.
      int32_t index = index_loc.GetConstant()->AsIntConstant()->GetValue();
      if (index < 0 || index >= length) {
        // Always out of bounds: branch unconditionally to the slow path.
        BoundsCheckSlowPathMIPS64* slow_path =
            new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        __ Bc(slow_path->GetEntryLabel());
      } else {
        // Nothing to be done.
      }
      return;
    }

    // Constant length, register index.
    BoundsCheckSlowPathMIPS64* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    GpuRegister index = index_loc.AsRegister<GpuRegister>();
    if (length == 0) {
      // Every index is out of bounds for a zero-length array.
      __ Bc(slow_path->GetEntryLabel());
    } else if (length == 1) {
      // Only index 0 is in bounds.
      __ Bnezc(index, slow_path->GetEntryLabel());
    } else {
      // TMP = (unsigned) index < length; fail when TMP == 0. The length was
      // only kept constant when it fits the Sltiu immediate.
      DCHECK(IsUint<15>(length)) << length;
      __ Sltiu(TMP, index, length);
      __ Beqzc(TMP, slow_path->GetEntryLabel());
    }
  } else {
    // Register length.
    GpuRegister length = length_loc.AsRegister<GpuRegister>();
    BoundsCheckSlowPathMIPS64* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    if (index_loc.IsConstant()) {
      int32_t index = index_loc.GetConstant()->AsIntConstant()->GetValue();
      if (index < 0) {
        // A negative constant index always fails.
        __ Bc(slow_path->GetEntryLabel());
      } else if (index == 0) {
        // Index 0 fails exactly when length <= 0.
        __ Blezc(length, slow_path->GetEntryLabel());
      } else {
        // TMP = (unsigned) length < index + 1, i.e. length <= index;
        // fail when TMP != 0.
        DCHECK(IsInt<16>(index + 1)) << index;
        __ Sltiu(TMP, length, index + 1);
        __ Bnezc(TMP, slow_path->GetEntryLabel());
      }
    } else {
      // Both in registers: a single compact unsigned branch suffices.
      __ Bgeuc(index, length, slow_path->GetEntryLabel());
    }
  }
}
2928
Alexey Frunze15958152017-02-09 19:08:30 -08002929// Temp is used for read barrier.
2930static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
2931 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002932 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08002933 (kUseBakerReadBarrier ||
2934 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
2935 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
2936 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
2937 return 1;
2938 }
2939 return 0;
2940}
2941
2942// Extra temp is used for read barrier.
2943static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
2944 return 1 + NumberOfInstanceOfTemps(type_check_kind);
2945}
2946
Alexey Frunze4dda3372015-06-01 18:31:49 -07002947void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002948 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzedfc30af2018-01-24 16:25:10 -08002949 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01002950 LocationSummary* locations =
2951 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002952 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00002953 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
2954 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
2955 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
2956 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
2957 } else {
2958 locations->SetInAt(1, Location::RequiresRegister());
2959 }
Alexey Frunze15958152017-02-09 19:08:30 -08002960 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002961}
2962
2963void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002964 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002965 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002966 Location obj_loc = locations->InAt(0);
2967 GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
Vladimir Marko175e7862018-03-27 09:03:13 +00002968 Location cls = locations->InAt(1);
Alexey Frunze15958152017-02-09 19:08:30 -08002969 Location temp_loc = locations->GetTemp(0);
2970 GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
2971 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
2972 DCHECK_LE(num_temps, 2u);
2973 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002974 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2975 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2976 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2977 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
2978 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
2979 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
2980 const uint32_t object_array_data_offset =
2981 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
2982 Mips64Label done;
Alexey Frunze4dda3372015-06-01 18:31:49 -07002983
Alexey Frunzedfc30af2018-01-24 16:25:10 -08002984 bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01002985 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01002986 new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
2987 instruction, is_type_check_slow_path_fatal);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002988 codegen_->AddSlowPath(slow_path);
2989
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002990 // Avoid this check if we know `obj` is not null.
2991 if (instruction->MustDoNullCheck()) {
2992 __ Beqzc(obj, &done);
2993 }
2994
2995 switch (type_check_kind) {
2996 case TypeCheckKind::kExactCheck:
2997 case TypeCheckKind::kArrayCheck: {
2998 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08002999 GenerateReferenceLoadTwoRegisters(instruction,
3000 temp_loc,
3001 obj_loc,
3002 class_offset,
3003 maybe_temp2_loc,
3004 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003005 // Jump to slow path for throwing the exception or doing a
3006 // more involved array check.
Vladimir Marko175e7862018-03-27 09:03:13 +00003007 __ Bnec(temp, cls.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003008 break;
3009 }
3010
3011 case TypeCheckKind::kAbstractClassCheck: {
3012 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003013 GenerateReferenceLoadTwoRegisters(instruction,
3014 temp_loc,
3015 obj_loc,
3016 class_offset,
3017 maybe_temp2_loc,
3018 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003019 // If the class is abstract, we eagerly fetch the super class of the
3020 // object to avoid doing a comparison we know will fail.
3021 Mips64Label loop;
3022 __ Bind(&loop);
3023 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003024 GenerateReferenceLoadOneRegister(instruction,
3025 temp_loc,
3026 super_offset,
3027 maybe_temp2_loc,
3028 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003029 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3030 // exception.
3031 __ Beqzc(temp, slow_path->GetEntryLabel());
3032 // Otherwise, compare the classes.
Vladimir Marko175e7862018-03-27 09:03:13 +00003033 __ Bnec(temp, cls.AsRegister<GpuRegister>(), &loop);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003034 break;
3035 }
3036
3037 case TypeCheckKind::kClassHierarchyCheck: {
3038 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003039 GenerateReferenceLoadTwoRegisters(instruction,
3040 temp_loc,
3041 obj_loc,
3042 class_offset,
3043 maybe_temp2_loc,
3044 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003045 // Walk over the class hierarchy to find a match.
3046 Mips64Label loop;
3047 __ Bind(&loop);
Vladimir Marko175e7862018-03-27 09:03:13 +00003048 __ Beqc(temp, cls.AsRegister<GpuRegister>(), &done);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003049 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003050 GenerateReferenceLoadOneRegister(instruction,
3051 temp_loc,
3052 super_offset,
3053 maybe_temp2_loc,
3054 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003055 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3056 // exception. Otherwise, jump to the beginning of the loop.
3057 __ Bnezc(temp, &loop);
3058 __ Bc(slow_path->GetEntryLabel());
3059 break;
3060 }
3061
3062 case TypeCheckKind::kArrayObjectCheck: {
3063 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003064 GenerateReferenceLoadTwoRegisters(instruction,
3065 temp_loc,
3066 obj_loc,
3067 class_offset,
3068 maybe_temp2_loc,
3069 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003070 // Do an exact check.
Vladimir Marko175e7862018-03-27 09:03:13 +00003071 __ Beqc(temp, cls.AsRegister<GpuRegister>(), &done);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003072 // Otherwise, we need to check that the object's class is a non-primitive array.
3073 // /* HeapReference<Class> */ temp = temp->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08003074 GenerateReferenceLoadOneRegister(instruction,
3075 temp_loc,
3076 component_offset,
3077 maybe_temp2_loc,
3078 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003079 // If the component type is null, jump to the slow path to throw the exception.
3080 __ Beqzc(temp, slow_path->GetEntryLabel());
3081 // Otherwise, the object is indeed an array, further check that this component
3082 // type is not a primitive type.
3083 __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
3084 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3085 __ Bnezc(temp, slow_path->GetEntryLabel());
3086 break;
3087 }
3088
3089 case TypeCheckKind::kUnresolvedCheck:
3090 // We always go into the type check slow path for the unresolved check case.
3091 // We cannot directly call the CheckCast runtime entry point
3092 // without resorting to a type checking slow path here (i.e. by
3093 // calling InvokeRuntime directly), as it would require to
3094 // assign fixed registers for the inputs of this HInstanceOf
3095 // instruction (following the runtime calling convention), which
3096 // might be cluttered by the potential first read barrier
3097 // emission at the beginning of this method.
3098 __ Bc(slow_path->GetEntryLabel());
3099 break;
3100
3101 case TypeCheckKind::kInterfaceCheck: {
3102 // Avoid read barriers to improve performance of the fast path. We can not get false
3103 // positives by doing this.
3104 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003105 GenerateReferenceLoadTwoRegisters(instruction,
3106 temp_loc,
3107 obj_loc,
3108 class_offset,
3109 maybe_temp2_loc,
3110 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003111 // /* HeapReference<Class> */ temp = temp->iftable_
Alexey Frunze15958152017-02-09 19:08:30 -08003112 GenerateReferenceLoadTwoRegisters(instruction,
3113 temp_loc,
3114 temp_loc,
3115 iftable_offset,
3116 maybe_temp2_loc,
3117 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003118 // Iftable is never null.
3119 __ Lw(TMP, temp, array_length_offset);
3120 // Loop through the iftable and check if any class matches.
3121 Mips64Label loop;
3122 __ Bind(&loop);
3123 __ Beqzc(TMP, slow_path->GetEntryLabel());
3124 __ Lwu(AT, temp, object_array_data_offset);
3125 __ MaybeUnpoisonHeapReference(AT);
3126 // Go to next interface.
3127 __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
3128 __ Addiu(TMP, TMP, -2);
3129 // Compare the classes and continue the loop if they do not match.
Vladimir Marko175e7862018-03-27 09:03:13 +00003130 __ Bnec(AT, cls.AsRegister<GpuRegister>(), &loop);
3131 break;
3132 }
3133
3134 case TypeCheckKind::kBitstringCheck: {
3135 // /* HeapReference<Class> */ temp = obj->klass_
3136 GenerateReferenceLoadTwoRegisters(instruction,
3137 temp_loc,
3138 obj_loc,
3139 class_offset,
3140 maybe_temp2_loc,
3141 kWithoutReadBarrier);
3142
3143 GenerateBitstringTypeCheckCompare(instruction, temp);
3144 __ Bnezc(temp, slow_path->GetEntryLabel());
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003145 break;
3146 }
3147 }
3148
3149 __ Bind(&done);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003150 __ Bind(slow_path->GetExitLabel());
3151}
3152
void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
  // A clinit check may need to run the class initializer, so it is a
  // call-on-slow-path instruction.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    // The check forwards its input (the resolved class) unchanged.
    locations->SetOut(Location::SameAsFirstInput());
  }
  // Rely on the type initialization to save everything we need.
  locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
}
3163
void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(check->GetLoadClass(), check);
  codegen_->AddSlowPath(slow_path);
  // Branch to the slow path unless the class in the input register is
  // already initialized.
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
}
3172
void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
  // Register constraints for HCompare, keyed on the type of the operands.
  DataType::Type in_type = compare->InputAt(0)->GetType();

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(compare);

  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      // Integral compare: second operand may be an inline constant.
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      // FP compare: both operands in FPU registers; the -1/0/1 result is
      // still produced in a general-purpose register.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}
3202
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  // Emits the three-way comparison for HCompare.
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  DataType::Type in_type = instruction->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      // Default to the hardwired ZERO register; only materialize a
      // constant into AT when it is non-zero.
      GpuRegister rhs = ZERO;
      if (use_imm) {
        if (in_type == DataType::Type::kInt64) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // res = (rhs < lhs) - (lhs < rhs), yielding -1, 0 or +1.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case DataType::Type::kFloat32: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Equal operands produce 0 and skip the ordering checks.
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // Gt bias: an unordered comparison (NaN operand) falls through to 1.
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // Lt bias: an unordered comparison (NaN operand) falls through to -1.
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case DataType::Type::kFloat64: {
      // Same scheme as kFloat32, using the double-precision compares.
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3294
void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
  // Register constraints shared by all HCondition visitors.
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  switch (instruction->InputAt(0)->GetType()) {
    // Note: `default:` deliberately shares the integral handling with kInt64,
    // so every non-FP type takes a register/constant pair.
    default:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      break;
  }
  // A condition folded into its user (e.g. a branch) produces no value.
  if (!instruction->IsEmittedAtUseSite()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
3314
void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
  // A condition emitted at its use site produces no materialized value here.
  if (instruction->IsEmittedAtUseSite()) {
    return;
  }

  DataType::Type type = instruction->InputAt(0)->GetType();
  LocationSummary* locations = instruction->GetLocations();
  switch (type) {
    default:
      // Integer case.
      GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
      return;
    case DataType::Type::kInt64:
      GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
      return;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      // FP conditions also need the gt/lt bias for NaN handling.
      GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
      return;
  }
}
3336
Alexey Frunzec857c742015-09-23 15:12:39 -07003337void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3338 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003339 DataType::Type type = instruction->GetResultType();
Alexey Frunzec857c742015-09-23 15:12:39 -07003340
3341 LocationSummary* locations = instruction->GetLocations();
3342 Location second = locations->InAt(1);
3343 DCHECK(second.IsConstant());
3344
3345 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3346 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3347 int64_t imm = Int64FromConstant(second.GetConstant());
3348 DCHECK(imm == 1 || imm == -1);
3349
3350 if (instruction->IsRem()) {
3351 __ Move(out, ZERO);
3352 } else {
3353 if (imm == -1) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003354 if (type == DataType::Type::kInt32) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003355 __ Subu(out, ZERO, dividend);
3356 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003357 DCHECK_EQ(type, DataType::Type::kInt64);
Alexey Frunzec857c742015-09-23 15:12:39 -07003358 __ Dsubu(out, ZERO, dividend);
3359 }
3360 } else if (out != dividend) {
3361 __ Move(out, dividend);
3362 }
3363 }
3364}
3365
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  // Emits code for a division or remainder whose divisor is a (possibly
  // negative) power of two, using shifts instead of a hardware divide.
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  // AbsOrMin keeps INT*_MIN representable; ctz_imm is the shift amount.
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (type == DataType::Type::kInt32) {
      // TMP receives the rounding bias (low bits of the sign extension) so
      // that the arithmetic shift rounds toward zero for negative dividends.
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);
      } else {
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      // A negative divisor negates the quotient.
      if (imm < 0) {
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      // 64-bit variant; shifts of 32 or more need the *32 instruction forms.
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        // Bias, clear the high bits of the biased value with Ins, then
        // remove the bias again to get a correctly signed remainder.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        __ Ins(out, ZERO, ctz_imm, 32 - ctz_imm);
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        __ DblIns(out, ZERO, ctz_imm, 64 - ctz_imm);
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
3454
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  // Emits code for a division or remainder by an arbitrary non-trivial
  // constant using the magic-number multiplication technique
  // (multiply-high plus correction, cf. Hacker's Delight).
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == DataType::Type::kInt64),
                                  &magic,
                                  &shift);

  if (type == DataType::Type::kInt32) {
    // TMP = high 32 bits of dividend * magic.
    __ LoadConst32(TMP, magic);
    __ MuhR6(TMP, dividend, TMP);

    // Correct for the sign mismatch between divisor and magic constant.
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // quotient = TMP - (TMP >> 31), adding 1 for negative results.
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // remainder = dividend - quotient * imm (quotient built in AT).
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    // 64-bit variant of the same scheme.
    __ LoadConst64(TMP, magic);
    __ Dmuh(TMP, dividend, TMP);

    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    // Shifts of 32 or more use the *32 instruction forms.
    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
3528
3529void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3530 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003531 DataType::Type type = instruction->GetResultType();
3532 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Alexey Frunzec857c742015-09-23 15:12:39 -07003533
3534 LocationSummary* locations = instruction->GetLocations();
3535 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3536 Location second = locations->InAt(1);
3537
3538 if (second.IsConstant()) {
3539 int64_t imm = Int64FromConstant(second.GetConstant());
3540 if (imm == 0) {
3541 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3542 } else if (imm == 1 || imm == -1) {
3543 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003544 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003545 DivRemByPowerOfTwo(instruction);
3546 } else {
3547 DCHECK(imm <= -2 || imm >= 2);
3548 GenerateDivRemWithAnyConstant(instruction);
3549 }
3550 } else {
3551 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3552 GpuRegister divisor = second.AsRegister<GpuRegister>();
3553 if (instruction->IsDiv()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003554 if (type == DataType::Type::kInt32)
Alexey Frunzec857c742015-09-23 15:12:39 -07003555 __ DivR6(out, dividend, divisor);
3556 else
3557 __ Ddiv(out, dividend, divisor);
3558 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003559 if (type == DataType::Type::kInt32)
Alexey Frunzec857c742015-09-23 15:12:39 -07003560 __ ModR6(out, dividend, divisor);
3561 else
3562 __ Dmod(out, dividend, divisor);
3563 }
3564 }
3565}
3566
void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
  // Register constraints for HDiv; division never calls into the runtime here.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
  switch (div->GetResultType()) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      // The divisor may be an inline constant (strength-reduced later).
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
3589
3590void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003591 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003592 LocationSummary* locations = instruction->GetLocations();
3593
3594 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003595 case DataType::Type::kInt32:
3596 case DataType::Type::kInt64:
Alexey Frunzec857c742015-09-23 15:12:39 -07003597 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003598 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003599 case DataType::Type::kFloat32:
3600 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003601 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3602 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3603 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003604 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07003605 __ DivS(dst, lhs, rhs);
3606 else
3607 __ DivD(dst, lhs, rhs);
3608 break;
3609 }
3610 default:
3611 LOG(FATAL) << "Unexpected div type " << type;
3612 }
3613}
3614
void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  // Throwing slow-path locations: the divisor may be a register or a constant.
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}
3619
void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  // Emits the zero-divisor check; the slow path throws ArithmeticException.
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathMIPS64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  DataType::Type type = instruction->GetType();

  if (!DataType::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
    if (divisor == 0) {
      // A constant zero divisor always throws: branch unconditionally.
      __ Bc(slow_path->GetEntryLabel());
    } else {
      // A division by a non-null constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    // Branch to the slow path when the divisor register holds zero.
    __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
  }
}
3645
void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
  // Double constants live in a constant location; no register is needed.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
3651
void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3655
void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  // HExit needs no locations.
  exit->SetLocations(nullptr);
}
3659
void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
  // HExit generates no code.
}
3662
void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
  // Float constants live in a constant location; no register is needed.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
3668
void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3672
void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  // Emits an unconditional control transfer, taking care of suspend checks
  // on loop back edges and eliding branches to the fall-through block.
  if (successor->IsExitBlock()) {
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
      // Increment the method's 16-bit hotness counter on each back edge.
      __ Ld(AT, SP, kCurrentMethodStackOffset);
      __ Lhu(TMP, AT, ArtMethod::HotnessCountOffset().Int32Value());
      __ Addiu(TMP, TMP, 1);
      __ Sh(TMP, AT, ArtMethod::HotnessCountOffset().Int32Value());
    }
    // The suspend check also performs the branch to the successor.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Skip the branch when the successor is laid out immediately after.
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ Bc(codegen_->GetLabelOf(successor));
  }
}
3700
void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
  // HGoto needs no locations.
  got->SetLocations(nullptr);
}
3704
void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
  // Delegate to the shared goto handler.
  HandleGoto(got, got->GetSuccessor());
}
3708
void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  // HTryBoundary needs no locations.
  try_boundary->SetLocations(nullptr);
}
3712
void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  // Only emit a jump for the normal-flow successor; an exit successor
  // needs no code.
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}
3719
// Materializes the boolean result (0 or 1) of an int/long comparison into
// the output GPR. Prefers single-instruction immediate forms (slti/sltiu/
// xori/addiu) when the constant RHS fits in 16 bits; otherwise the constant
// is loaded into TMP and the register form is used. Conditions without a
// direct MIPS instruction (>=, >, <=) are derived from slt/sltu plus an
// Xori with 1 to invert the result.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
                                                            bool is64bit,
                                                            LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (rhs_imm == 0) {
          // Compare against zero directly: (lhs == 0) via unsigned "< 1",
          // (lhs != 0) via "0 < lhs" (unsigned).
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          // Subtract the immediate; result is zero iff lhs == rhs.
          if (is64bit) {
            __ Daddiu(dst, lhs, -rhs_imm);
          } else {
            __ Addiu(dst, lhs, -rhs_imm);
          }
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        // XOR makes dst zero iff lhs == rhs; then normalize to 0/1.
        if (use_imm && IsUint<16>(rhs_imm)) {
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst64(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
3875
// Materializes an int/long comparison into `dst` for use by a conditional
// move (seleqz/selnez). Unlike GenerateIntLongCompare, the value in `dst` is
// not normalized to 0/1 — only its zero/non-zero state is meaningful.
// Returns true when `dst` holds the *negation* of `cond` (e.g. for kCondEQ
// it emits an XOR, which is non-zero when the operands differ), so the
// caller must select on the inverted sense of `dst`.
bool InstructionCodeGeneratorMIPS64::MaterializeIntLongCompare(IfCondition cond,
                                                               bool is64bit,
                                                               LocationSummary* input_locations,
                                                               GpuRegister dst) {
  GpuRegister lhs = input_locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = input_locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // dst becomes zero iff lhs == rhs (difference or XOR), i.e. it holds
      // the truth value of NE; hence the inversion flag for EQ below.
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (is64bit) {
          __ Daddiu(dst, lhs, -rhs_imm);
        } else {
          __ Addiu(dst, lhs, -rhs_imm);
        }
      } else if (use_imm && IsUint<16>(rhs_imm)) {
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      return (cond == kCondEQ);

    case kCondLT:
    case kCondGE:
      // dst = (lhs < rhs); inverted for GE.
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      return (cond == kCondGE);

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        return (cond == kCondGT);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        return (cond == kCondLE);
      }

    case kCondB:
    case kCondAE:
      // dst = (lhs < rhs) unsigned; inverted for AE.
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      return (cond == kCondAE);

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        return (cond == kCondA);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        return (cond == kCondBE);
      }
  }
}
3983
// Emits a compare-and-branch to `label` for an int/long condition using the
// MIPS64R6 compact branches. A constant zero RHS uses the single-register
// forms (Beqzc/Bnezc/Bltzc/...); any other constant is loaded into TMP and
// the two-register forms are used. Conditions with a reversed instruction
// (LE, GT, BE, A) are emitted by swapping the operands.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
                                                                     bool is64bit,
                                                                     LocationSummary* locations,
                                                                     Mips64Label* label) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }

  if (use_imm && rhs_imm == 0) {
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqzc(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnezc(lhs, label);
        break;
      case kCondLT:
        __ Bltzc(lhs, label);
        break;
      case kCondGE:
        __ Bgezc(lhs, label);
        break;
      case kCondLE:
        __ Blezc(lhs, label);
        break;
      case kCondGT:
        __ Bgtzc(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ Bc(label);
        break;
    }
  } else {
    if (use_imm) {
      rhs_reg = TMP;
      __ LoadConst64(rhs_reg, rhs_imm);
    }
    switch (cond) {
      case kCondEQ:
        __ Beqc(lhs, rhs_reg, label);
        break;
      case kCondNE:
        __ Bnec(lhs, rhs_reg, label);
        break;
      case kCondLT:
        __ Bltc(lhs, rhs_reg, label);
        break;
      case kCondGE:
        __ Bgec(lhs, rhs_reg, label);
        break;
      case kCondLE:
        // lhs <= rhs  <=>  rhs >= lhs.
        __ Bgec(rhs_reg, lhs, label);
        break;
      case kCondGT:
        // lhs > rhs  <=>  rhs < lhs.
        __ Bltc(rhs_reg, lhs, label);
        break;
      case kCondB:
        __ Bltuc(lhs, rhs_reg, label);
        break;
      case kCondAE:
        __ Bgeuc(lhs, rhs_reg, label);
        break;
      case kCondBE:
        __ Bgeuc(rhs_reg, lhs, label);
        break;
      case kCondA:
        __ Bltuc(rhs_reg, lhs, label);
        break;
    }
  }
}
4070
// Materializes the boolean result (0 or 1) of a float/double comparison into
// the output GPR. The R6 CMP.cond.fmt instructions write an all-ones pattern
// into FTMP on true and zero on false; Mfc1 then moves the (sign-extended)
// low word into dst, so dst is -1 or 0:
//   - Andi dst, dst, 1 normalizes that to 1/0 for the direct conditions;
//   - for kCondNE, Addiu dst, dst, 1 maps -1 -> 0 and 0 -> 1, inverting the
//     equality result in a single instruction.
// `gt_bias` picks between the ordered and unordered compare variants so that
// NaN operands produce the result mandated by fcmpg/fcmpl semantics.
void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
                                                       bool gt_bias,
                                                       DataType::Type type,
                                                       LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // dst is -1 (equal) or 0 (not equal); +1 yields 0 or 1.
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // dst is -1 (equal) or 0 (not equal); +1 yields 0 or 1.
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
4185
// Materializes a float/double comparison into the FPU register `dst` (as the
// CMP.cond.fmt all-ones/zero pattern) for consumption by sel.fmt /
// seleqz.fmt / selnez.fmt. Returns true when `dst` holds the *negation* of
// `cond` — only kCondNE, which is emitted as an equality compare — so the
// caller must select on the inverted sense. `gt_bias` chooses ordered vs
// unordered compare variants for correct NaN behavior.
bool InstructionCodeGeneratorMIPS64::MaterializeFpCompare(IfCondition cond,
                                                          bool gt_bias,
                                                          DataType::Type type,
                                                          LocationSummary* input_locations,
                                                          FpuRegister dst) {
  FpuRegister lhs = input_locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = input_locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(dst, lhs, rhs);
        return false;
      case kCondNE:
        // No direct "not equal" compare: emit EQ and report inversion.
        __ CmpEqS(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(dst, lhs, rhs);
        } else {
          __ CmpUltS(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(dst, lhs, rhs);
        } else {
          __ CmpUleS(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(dst, rhs, lhs);
        } else {
          __ CmpLtS(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(dst, rhs, lhs);
        } else {
          __ CmpLeS(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(dst, lhs, rhs);
        return false;
      case kCondNE:
        // No direct "not equal" compare: emit EQ and report inversion.
        __ CmpEqD(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(dst, lhs, rhs);
        } else {
          __ CmpUltD(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(dst, lhs, rhs);
        } else {
          __ CmpUleD(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(dst, rhs, lhs);
        } else {
          __ CmpLtD(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(dst, rhs, lhs);
        } else {
          __ CmpLeD(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
4276
// Emits a float/double compare followed by a branch to `label` when `cond`
// holds. The compare result lands in FTMP; Bc1nez branches on true for the
// directly-expressible conditions, while kCondNE is emitted as an equality
// compare plus Bc1eqz (branch on false). `gt_bias` selects the ordered vs
// unordered compare variants for correct NaN handling.
void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
                                                                bool gt_bias,
                                                                DataType::Type type,
                                                                LocationSummary* locations,
                                                                Mips64Label* label) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
4379
// Emits the branches for a condition consumed by HIf/HDeoptimize/HSelect.
// Either target may be null, meaning that successor is the fall-through
// block and needs no branch. Constant conditions become unconditional
// branches; materialized conditions are tested against zero; otherwise the
// comparison is fused into the branch itself.
void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           Mips64Label* true_target,
                                                           Mips64Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ Bc(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ Bc(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
    } else {
      __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    DataType::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    Mips64Label* branch_target = true_target;

    if (true_target == nullptr) {
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        // All sub-word integral types are compared as 32-bit values.
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
        break;
      case DataType::Type::kInt64:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ Bc(false_target);
  }
}
4455
4456void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004457 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00004458 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004459 locations->SetInAt(0, Location::RequiresRegister());
4460 }
4461}
4462
4463void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00004464 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
4465 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004466 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004467 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004468 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004469 nullptr : codegen_->GetLabelOf(false_successor);
4470 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004471}
4472
// Deoptimization calls into the runtime on its slow path; only the first
// runtime-calling-convention argument register needs to be preserved across
// that call.
void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  LocationSummary* locations = new (GetGraph()->GetAllocator())
      LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
  InvokeRuntimeCallingConvention calling_convention;
  RegisterSet caller_saves = RegisterSet::Empty();
  caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetCustomSlowPathCallerSaves(caller_saves);
  // As in VisitIf, an unmaterialized condition needs no input register.
  if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}
4484
// Branches to the deoptimization slow path when the condition holds; a null
// false target makes the false case fall through to the regular code.
void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
  SlowPathCodeMIPS64* slow_path =
      deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
  GenerateTestAndBranch(deoptimize,
                        /* condition_input_index */ 0,
                        slow_path->GetEntryLabel(),
                        /* false_target */ nullptr);
}
4493
// This function returns true if a conditional move can be generated for HSelect.
// Otherwise it returns false and HSelect must be implemented in terms of conditonal
// branches and regular moves.
//
// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
//
// While determining feasibility of a conditional move and setting inputs/outputs
// are two distinct tasks, this function does both because they share quite a bit
// of common logic.
//
// The comments inside each case show the instruction sequence that
// GenConditionalMove will emit for that combination of condition type,
// destination type, and zero-constant inputs (seleqz/selnez and their .fmt
// FPU counterparts select based on a zero/non-zero condition register).
static bool CanMoveConditionally(HSelect* select, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  HCondition* condition = cond->AsCondition();

  // A materialized condition is always an int (0/1) value.
  DataType::Type cond_type =
      materialized ? DataType::Type::kInt32 : condition->InputAt(0)->GetType();
  DataType::Type dst_type = select->GetType();

  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  // A zero input can be produced by seleqz/selnez directly instead of
  // occupying a register, which shortens the emitted sequence.
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  if (!cond->IsConstant()) {
    if (!DataType::IsFloatingPointType(cond_type)) {
      if (!DataType::IsFloatingPointType(dst_type)) {
        // Moving int/long on int/long condition.
        if (is_true_value_zero_constant) {
          // seleqz out_reg, false_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez out_reg, true_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_false_in = true;
        } else if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          // selnez AT, true_reg, cond_reg
          // seleqz TMP, false_reg, cond_reg
          // or out_reg, AT, TMP
          can_move_conditionally = true;
        }
      } else {
        // Moving float/double on int/long condition.
        if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          can_move_conditionally = true;
          if (is_true_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // seleqz.fmt out_reg, false_reg, temp_cond_reg
            use_const_for_true_in = true;
          } else if (is_false_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // selnez.fmt out_reg, true_reg, temp_cond_reg
            use_const_for_false_in = true;
          } else {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // sel.fmt temp_cond_reg, false_reg, true_reg
            // mov.fmt out_reg, temp_cond_reg
          }
        }
      }
    } else {
      if (!DataType::IsFloatingPointType(dst_type)) {
        // Moving int/long on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // seleqz out_reg, false_reg, TMP
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // selnez out_reg, true_reg, TMP
          use_const_for_false_in = true;
        } else {
          // mfc1 TMP, temp_cond_reg
          // selnez AT, true_reg, TMP
          // seleqz TMP, false_reg, TMP
          // or out_reg, AT, TMP
        }
      } else {
        // Moving float/double on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // seleqz.fmt out_reg, false_reg, temp_cond_reg
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez.fmt out_reg, true_reg, temp_cond_reg
          use_const_for_false_in = true;
        } else {
          // sel.fmt temp_cond_reg, false_reg, true_reg
          // mov.fmt out_reg, temp_cond_reg
        }
      }
    }
  }

  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                DataType::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                DataType::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }

    if (can_move_conditionally) {
      locations_to_set->SetOut(DataType::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    } else {
      // Fall-back path (branches + moves) overwrites the false input in place.
      locations_to_set->SetOut(Location::SameAsFirstInput());
    }
  }

  return can_move_conditionally;
}
4641
4642
// Emits a branchless conditional move for HSelect using the MIPS64 R6 select
// instructions: SELEQZ/SELNEZ for integer destinations and
// SELEQZ/SELNEZ/SEL.fmt for float/double destinations.
// Precondition: CanMoveConditionally() returned true for this HSelect, so any
// constant input is a zero bit pattern (checked by the DCHECKs below).
void InstructionCodeGeneratorMIPS64::GenConditionalMove(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  GpuRegister cond_reg = TMP;
  FpuRegister fcond_reg = FTMP;
  DataType::Type cond_type = DataType::Type::kInt32;
  bool cond_inverted = false;
  DataType::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition is already materialized in a register (input 2).
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<GpuRegister>();
  } else {
    // Materialize the condition into cond_reg (integer compares) or fcond_reg
    // (FP compares). `cond_inverted` records whether the materialized value is
    // the negation of the condition; the select emission below compensates by
    // swapping SELEQZ/SELNEZ.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        // 32-bit and narrower integral comparisons.
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ false,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kInt64:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ true,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        cond_inverted = MaterializeFpCompare(if_cond,
                                             condition->IsGtBias(),
                                             cond_type,
                                             cond_locations,
                                             fcond_reg);
        break;
    }
  }

  // CanMoveConditionally() only allows constants that are zero bit patterns,
  // which the zero register of SELEQZ/SELNEZ provides for free.
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      // Integer-typed destination (all non-FP destination types).
      if (DataType::IsFloatingPointType(cond_type)) {
        // The condition lives in an FP register; move it to a GPR for the
        // integer select instructions.
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        // True input is the zero constant: select false_src only when the
        // condition does not hold, otherwise the destination becomes zero.
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        // False input is the zero constant: select true_src only when the
        // condition holds.
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else {
        // Both inputs are in registers: combine two complementary selects.
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        }
        __ Or(dst.AsRegister<GpuRegister>(), AT, TMP);
      }
      break;
    case DataType::Type::kFloat32: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        // True input is the zero constant (+0.0f bit pattern).
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        // False input is the zero constant (+0.0f bit pattern).
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // Both inputs in FP registers: SEL.S picks between its two operands,
        // then the result is copied to the destination register.
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        // True input is the zero constant (+0.0 bit pattern).
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        // False input is the zero constant (+0.0 bit pattern).
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // Both inputs in FP registers: SEL.D picks between its two operands,
        // then the result is copied to the destination register.
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
4794
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004795void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004796 LocationSummary* locations = new (GetGraph()->GetAllocator())
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004797 LocationSummary(flag, LocationSummary::kNoCall);
4798 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07004799}
4800
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004801void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
4802 __ LoadFromOffset(kLoadWord,
4803 flag->GetLocations()->Out().AsRegister<GpuRegister>(),
4804 SP,
4805 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07004806}
4807
David Brazdil74eb1b22015-12-14 11:44:01 +00004808void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004809 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004810 CanMoveConditionally(select, locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00004811}
4812
4813void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004814 if (CanMoveConditionally(select, /* locations_to_set */ nullptr)) {
4815 GenConditionalMove(select);
4816 } else {
4817 LocationSummary* locations = select->GetLocations();
4818 Mips64Label false_target;
4819 GenerateTestAndBranch(select,
4820 /* condition_input_index */ 2,
4821 /* true_target */ nullptr,
4822 &false_target);
4823 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
4824 __ Bind(&false_target);
4825 }
David Brazdil74eb1b22015-12-14 11:44:01 +00004826}
4827
void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // HNativeDebugInfo uses no registers; an empty LocationSummary suffices.
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
4831
void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // Intentionally empty:
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
4835
void CodeGeneratorMIPS64::GenerateNop() {
  // Emit a single NOP instruction.
  __ Nop();
}
4839
// Sets up register constraints for a field load. Reference loads with read
// barriers enabled may call into a slow path and, unless Baker read barrier
// thunks are in use, need an extra temp register.
void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  DataType::Type field_type = field_info.GetFieldType();
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == DataType::Type::kReference);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      object_field_get_with_read_barrier
          ? LocationSummary::kCallOnSlowPath
          : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // Input 0 is the object holding the field.
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps in the case of an object field get with
    // read barriers enabled: we do not want the move to overwrite the
    // object's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_field_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier.
    if (!kBakerReadBarrierThunksEnableForFields) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
4873
// Generates code for a field load: picks the load width from the instruction
// type, emits the Baker fast path or a slow-path read barrier for reference
// fields, and emits "load-any" barriers after volatile loads.
void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  // `null_checker` lets the store below double as the implicit null check.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the load width/signedness matching the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      load_type = kLoadUnsignedByte;
      break;
    case DataType::Type::kInt8:
      load_type = kLoadSignedByte;
      break;
    case DataType::Type::kUint16:
      load_type = kLoadUnsignedHalfword;
      break;
    case DataType::Type::kInt16:
      load_type = kLoadSignedHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
      load_type = kLoadWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      load_type = kLoadDoubleword;
      break;
    case DataType::Type::kReference:
      // Compressed 32-bit heap reference, zero-extended to 64 bits.
      load_type = kLoadUnsignedWord;
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!DataType::IsFloatingPointType(type)) {
    DCHECK(dst_loc.IsRegister());
    GpuRegister dst = dst_loc.AsRegister<GpuRegister>();
    if (type == DataType::Type::kReference) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // The thunk-based Baker barrier needs no temp register.
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadUnsignedWord, dst, obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else {
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    }
  } else {
    DCHECK(dst_loc.IsFpuRegister());
    FpuRegister dst = dst_loc.AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, offset, null_checker);
  }

  // Memory barriers, in the case of references, are handled above in the
  // reference-load paths.
  if (is_volatile && (type != DataType::Type::kReference)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
4962
4963void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
4964 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
4965 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004966 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004967 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004968 if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004969 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004970 } else {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004971 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004972 }
4973}
4974
// Generates code for a field store: memory barriers around volatile stores,
// heap-reference poisoning when enabled, and GC card marking for reference
// stores that need a write barrier.
void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DataType::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  // `null_checker` lets the store below double as the implicit null check.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the store width matching the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      store_type = kStoreByte;
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      store_type = kStoreHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
    case DataType::Type::kReference:
      store_type = kStoreWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      store_type = kStoreDoubleword;
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  // Release-style barrier before a volatile store.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (value_location.IsConstant()) {
    int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
    __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
  } else {
    if (!DataType::IsFloatingPointType(type)) {
      DCHECK(value_location.IsRegister());
      GpuRegister src = value_location.AsRegister<GpuRegister>();
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, DataType::Type::kReference);
        // Poison into TMP so the original reference stays intact for the
        // card-marking below.
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      DCHECK(value_location.IsFpuRegister());
      FpuRegister src = value_location.AsFpuRegister<FpuRegister>();
      __ StoreFpuToOffset(store_type, src, obj, offset, null_checker);
    }
  }

  if (needs_write_barrier) {
    DCHECK(value_location.IsRegister());
    GpuRegister src = value_location.AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  // Full barrier after a volatile store.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
5052
5053void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
5054 HandleFieldGet(instruction, instruction->GetFieldInfo());
5055}
5056
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Delegate to the common field-get code generator.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
5060
void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Delegate to the common field-store location setup.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
5064
5065void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01005066 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005067}
5068
// Loads the heap reference at `*(out + offset)` into `out`, emitting a read
// barrier as dictated by `read_barrier_option`. `maybe_temp` must hold a
// register whenever the non-thunk Baker path or the slow-path barrier is used.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    }
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Move(maybe_temp.AsRegister<GpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
5107
// Loads the heap reference at `*(obj + offset)` into `out`, emitting a read
// barrier as dictated by `read_barrier_option`. Unlike the one-register
// variant, the base object register is distinct from the destination.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  GpuRegister obj_reg = obj.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // The thunk-based Baker barrier needs no temp register.
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
5144
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005145static inline int GetBakerMarkThunkNumber(GpuRegister reg) {
5146 static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 20, "Expecting equal");
5147 if (reg >= V0 && reg <= T2) { // 13 consequtive regs.
5148 return reg - V0;
5149 } else if (reg >= S2 && reg <= S7) { // 6 consequtive regs.
5150 return 13 + (reg - S2);
5151 } else if (reg == S8) { // One more.
5152 return 19;
5153 }
5154 LOG(FATAL) << "Unexpected register " << reg;
5155 UNREACHABLE();
5156}
5157
5158static inline int GetBakerMarkFieldArrayThunkDisplacement(GpuRegister reg, bool short_offset) {
5159 int num = GetBakerMarkThunkNumber(reg) +
5160 (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
5161 return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
5162}
5163
5164static inline int GetBakerMarkGcRootThunkDisplacement(GpuRegister reg) {
5165 return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
5166 BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
5167}
5168
// Loads a GC root from `*(obj + offset)` into `root`, honoring
// `read_barrier_option`. When `label_low` is non-null it is bound at the load
// instruction itself and `offset` must be 0x5678 — presumably a placeholder
// low-16 offset that is patched later by the caller/linker; TODO confirm
// against the callers that pass a label.
void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             GpuRegister obj,
                                                             uint32_t offset,
                                                             ReadBarrierOption read_barrier_option,
                                                             Mips64Label* label_low) {
  if (label_low != nullptr) {
    DCHECK_EQ(offset, 0x5678u);
  }
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        // temp = Thread::Current()->pReadBarrierMarkReg00
        // // AKA &art_quick_read_barrier_mark_introspection.
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // if (temp != nullptr) {
        //    temp = &gc_root_thunk<root_reg>
        //    root = temp(root)
        // }

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lwu.
        bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
        // For long offsets the high bits are folded into TMP with DAUI below.
        GpuRegister base = short_offset ? obj : TMP;
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
        if (!short_offset) {
          DCHECK(!label_low);
          __ Daui(base, obj, offset_high);
        }
        Mips64Label skip_call;
        // Bare branch: the load below sits in its delay/forbidden slot.
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        if (label_low != nullptr) {
          DCHECK(short_offset);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, base, offset_low);  // Single instruction
                                                                           // in delay slot.
        __ Jialc(T9, thunk_disp);
        __ Bind(&skip_call);
      } else {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark entry point corresponding
        // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
        // is false, and vice versa.
        //
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
        // if (temp != null) {
        //   root = temp(root)
        // }

        if (label_low != nullptr) {
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
        static_assert(
            sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
            "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
            "have different sizes.");
        static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                      "art::mirror::CompressedReference<mirror::Object> and int32_t "
                      "have different sizes.");

        // Slow path marking the GC root `root`.
        Location temp = Location::RegisterLocation(T9);
        SlowPathCodeMIPS64* slow_path =
            new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(
                instruction,
                root,
                /*entrypoint*/ temp);
        codegen_->AddSlowPath(slow_path);

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(root.reg() - 1);
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, temp.AsRegister<GpuRegister>(), TR, entry_point_offset);
        __ Bnezc(temp.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
        __ Bind(slow_path->GetExitLabel());
      }
    } else {
      if (label_low != nullptr) {
        __ Bind(label_low);
      }
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Daddiu64(root_reg, obj, static_cast<int32_t>(offset));
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    if (label_low != nullptr) {
      __ Bind(label_low);
    }
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
5293
// Emits a Baker read barrier protected reference load from the field at
// `obj + offset` into `ref`.
//
// When the introspection-thunk fast path is enabled
// (kBakerReadBarrierThunksEnableForFields), the mark state is tested by
// loading the pReadBarrierMarkReg00 entrypoint into T9 and calling a
// per-holder-register thunk only when it is non-null; otherwise this falls
// back to GenerateReferenceLoadWithBakerReadBarrier.
//
// `temp` must be invalid on the thunk path (T9/TMP are used instead);
// `needs_null_check` requests recording of an implicit null check on the
// original load.
void CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t offset,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (kBakerReadBarrierThunksEnableForFields) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not.  Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // If the offset is too large to fit into the lw instruction, we
    //     // use an adjusted base register (TMP) here. This register
    //     // receives bits 16 ... 31 of the offset before the thunk invocation
    //     // and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = *(obj+offset);  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    // A 16-bit signed offset fits into the lwu instruction directly.
    bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // There may or may not have been a null check if the field offset is smaller than
    // the page size.
    // There must've been a null check in case it's actually a load from an array.
    // We will, however, perform an explicit null check in the thunk as it's easier to
    // do it than not.
    if (instruction->IsArrayGet()) {
      DCHECK(!needs_null_check);
    }
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    Mips64Label skip_call;
    if (short_offset) {
      // Compact branch; the following slot is a forbidden slot, hence the Nop.
      __ Beqzc(T9, &skip_call, /* is_bare */ true);
      __ Nop();  // In forbidden slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);  // Single instruction.
    } else {
      // Split the offset; the high half goes into TMP via Daui in the delay slot
      // so the thunk can use the adjusted base.
      int16_t offset_low = Low16Bits(offset);
      int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign extension in lwu.
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Daui(TMP, obj, offset_high);  // In delay slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset_low);  // Single instruction.
    }
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // Slow-path-per-load variant: delegate to the generic Baker reference load
  // with no index and unit scale.
  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}
5383
// Emits a Baker read barrier protected reference load from the array element
// `obj[index]` (heap references are 4 bytes, hence TIMES_4) into `ref`.
//
// With kBakerReadBarrierThunksEnableForArrays, the element address is
// precomputed into TMP and a mark-introspection thunk is invoked only when
// the entrypoint in T9 is non-null; otherwise this falls back to the generic
// GenerateReferenceLoadWithBakerReadBarrier.
void CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  ScaleFactor scale_factor = TIMES_4;

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not.  Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // The element address is pre-calculated in the TMP register before the
    //     // thunk invocation and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    Mips64Label skip_call;
    // Branch with a delay slot: the element address computation (Dlsa) executes
    // whether or not the thunk is called.
    __ Beqz(T9, &skip_call, /* is_bare */ true);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    GpuRegister index_reg = index.AsRegister<GpuRegister>();
    __ Dlsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
    __ Jialc(T9, thunk_disp);
    __ Bind(&skip_call);
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // Slow-path-per-load variant: delegate to the generic Baker reference load.
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
5459
// Generic Baker read barrier reference load: loads the object's lock word
// *before* the reference load (the Sync(0) barrier enforces the required
// load-load order), then branches to a marking slow path when the read
// barrier state bit indicates a gray object.
//
// `index` may be invalid (plain field load), a constant, or a register;
// `scale_factor` scales a register index. With `always_update_field` the
// slow path also writes the possibly-moved reference back to
// `obj + field_offset` (then `offset` must be 0 and `scale_factor` TIMES_1).
// `temp` holds the lock word and must be a valid register here.
void CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    GpuRegister obj,
                                                                    uint32_t offset,
                                                                    Location index,
                                                                    ScaleFactor scale_factor,
                                                                    Location temp,
                                                                    bool needs_null_check,
                                                                    bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
  GpuRegister temp_reg = temp.AsRegister<GpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, computed_offset);
    } else {
      GpuRegister index_reg = index.AsRegister<GpuRegister>();
      // Dlsa cannot encode a 0 shift, so use a plain add for TIMES_1.
      if (scale_factor == TIMES_1) {
        __ Daddu(TMP, index_reg, obj);
      } else {
        __ Dlsa(TMP, index_reg, obj, scale_factor);
      }
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS64* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register. Thus `offset` and `scale_factor`
    // above are expected to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetScopedAllocator())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(instruction,
                                                    ref,
                                                    obj,
                                                    /* field_offset */ index,
                                                    temp_reg);
  } else {
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltzc(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5565
5566void CodeGeneratorMIPS64::GenerateReadBarrierSlow(HInstruction* instruction,
5567 Location out,
5568 Location ref,
5569 Location obj,
5570 uint32_t offset,
5571 Location index) {
5572 DCHECK(kEmitCompilerReadBarrier);
5573
5574 // Insert a slow path based read barrier *after* the reference load.
5575 //
5576 // If heap poisoning is enabled, the unpoisoning of the loaded
5577 // reference will be carried out by the runtime within the slow
5578 // path.
5579 //
5580 // Note that `ref` currently does not get unpoisoned (when heap
5581 // poisoning is enabled), which is alright as the `ref` argument is
5582 // not used by the artReadBarrierSlow entry point.
5583 //
5584 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01005585 SlowPathCodeMIPS64* slow_path = new (GetScopedAllocator())
Alexey Frunze15958152017-02-09 19:08:30 -08005586 ReadBarrierForHeapReferenceSlowPathMIPS64(instruction, out, ref, obj, offset, index);
5587 AddSlowPath(slow_path);
5588
5589 __ Bc(slow_path->GetEntryLabel());
5590 __ Bind(slow_path->GetExitLabel());
5591}
5592
5593void CodeGeneratorMIPS64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5594 Location out,
5595 Location ref,
5596 Location obj,
5597 uint32_t offset,
5598 Location index) {
5599 if (kEmitCompilerReadBarrier) {
5600 // Baker's read barriers shall be handled by the fast path
5601 // (CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier).
5602 DCHECK(!kUseBakerReadBarrier);
5603 // If heap poisoning is enabled, unpoisoning will be taken care of
5604 // by the runtime within the slow path.
5605 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
5606 } else if (kPoisonHeapReferences) {
5607 __ UnpoisonHeapReference(out.AsRegister<GpuRegister>());
5608 }
5609}
5610
5611void CodeGeneratorMIPS64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5612 Location out,
5613 Location root) {
5614 DCHECK(kEmitCompilerReadBarrier);
5615
5616 // Insert a slow path based read barrier *after* the GC root load.
5617 //
5618 // Note that GC roots are not affected by heap poisoning, so we do
5619 // not need to do anything special for this here.
5620 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01005621 new (GetScopedAllocator()) ReadBarrierForRootSlowPathMIPS64(instruction, out, root);
Alexey Frunze15958152017-02-09 19:08:30 -08005622 AddSlowPath(slow_path);
5623
5624 __ Bc(slow_path->GetEntryLabel());
5625 __ Bind(slow_path->GetExitLabel());
5626}
5627
Alexey Frunze4dda3372015-06-01 18:31:49 -07005628void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005629 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5630 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07005631 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005632 switch (type_check_kind) {
5633 case TypeCheckKind::kExactCheck:
5634 case TypeCheckKind::kAbstractClassCheck:
5635 case TypeCheckKind::kClassHierarchyCheck:
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005636 case TypeCheckKind::kArrayObjectCheck: {
5637 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
5638 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
5639 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005640 break;
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005641 }
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005642 case TypeCheckKind::kArrayCheck:
5643 case TypeCheckKind::kUnresolvedCheck:
5644 case TypeCheckKind::kInterfaceCheck:
5645 call_kind = LocationSummary::kCallOnSlowPath;
5646 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00005647 case TypeCheckKind::kBitstringCheck:
5648 break;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005649 }
5650
Vladimir Markoca6fff82017-10-03 14:49:14 +01005651 LocationSummary* locations =
5652 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07005653 if (baker_read_barrier_slow_path) {
5654 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
5655 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005656 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00005657 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
5658 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
5659 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
5660 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
5661 } else {
5662 locations->SetInAt(1, Location::RequiresRegister());
5663 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005664 // The output does overlap inputs.
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01005665 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07005666 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08005667 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005668}
5669
// Generates code for HInstanceOf. The boolean result is produced in `out`
// (1 = instance of, 0 = not). Fast paths are emitted inline per
// TypeCheckKind; unresolved/interface/array checks fall back to
// TypeCheckSlowPathMIPS64.
void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  Mips64Label done;
  SlowPathCodeMIPS64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Classes must be equal for the instanceof to succeed.
      // out = (out ^ cls) == 0, i.e. 1 iff the classes match.
      __ Xor(out, out, cls.AsRegister<GpuRegister>());
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ Bnec(out, cls.AsRegister<GpuRegister>(), &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop, success;
      __ Bind(&loop);
      __ Beqc(out, cls.AsRegister<GpuRegister>(), &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ Bnezc(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Do an exact check.
      Mips64Label success;
      __ Beqc(out, cls.AsRegister<GpuRegister>(), &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      // out = 1 iff the component type is a reference type (kPrimNot == 0).
      __ Sltiu(out, out, 1);
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bnec(out, cls.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);

      // Compare the class's bitstring against the expected path-to-root;
      // `out` becomes 0 on match, then is normalized to a boolean below.
      GenerateBitstringTypeCheckCompare(instruction, out);
      __ Sltiu(out, out, 1);
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
5865
5866void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005867 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005868 locations->SetOut(Location::ConstantLocation(constant));
5869}
5870
void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here; the constant will be generated at each use site.
}
5874
5875void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005876 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005877 locations->SetOut(Location::ConstantLocation(constant));
5878}
5879
void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here; the constant will be generated at each use site.
}
5883
// Builds locations for an invoke whose target method could not be resolved
// at compile time; dispatch goes through a runtime trampoline.
void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
5890
// Emits the runtime-trampoline call for an unresolved invoke; the shared
// CodeGenerator helper does the actual emission.
void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
5894
Alexey Frunze4dda3372015-06-01 18:31:49 -07005895void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
5896 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
5897 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
5898}
5899
// Builds locations for an interface invoke: the common invoke summary plus a
// fixed temp for the hidden argument.
void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // The register T0 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
}
5906
// Emits an interface call: loads the receiver's class, indexes its IMT to get
// the target ArtMethod*, and jumps through its quick entrypoint via T9. The
// hidden argument (dex method index) goes into the second temp register.
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // The class load above doubles as the implicit null check of the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->imt_ (the class's interface method table).
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
5948
5949void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07005950 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5951 if (intrinsic.TryDispatch(invoke)) {
5952 return;
5953 }
5954
Alexey Frunze4dda3372015-06-01 18:31:49 -07005955 HandleInvoke(invoke);
5956}
5957
5958void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005959 // Explicit clinit checks triggered by static invokes must have been pruned by
5960 // art::PrepareForRegisterAllocation.
5961 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005962
Chris Larsen3039e382015-08-26 07:54:08 -07005963 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5964 if (intrinsic.TryDispatch(invoke)) {
5965 return;
5966 }
5967
Alexey Frunze4dda3372015-06-01 18:31:49 -07005968 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005969}
5970
Orion Hodsonac141392017-01-13 11:53:47 +00005971void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
5972 HandleInvoke(invoke);
5973}
5974
5975void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
5976 codegen_->GenerateInvokePolymorphicCall(invoke);
5977}
5978
Orion Hodson4c8e12e2018-05-18 08:33:20 +01005979void LocationsBuilderMIPS64::VisitInvokeCustom(HInvokeCustom* invoke) {
5980 HandleInvoke(invoke);
5981}
5982
5983void InstructionCodeGeneratorMIPS64::VisitInvokeCustom(HInvokeCustom* invoke) {
5984 codegen_->GenerateInvokeCustomCall(invoke);
5985}
5986
Chris Larsen3039e382015-08-26 07:54:08 -07005987static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005988 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07005989 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
5990 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005991 return true;
5992 }
5993 return false;
5994}
5995
// Sanity-checks (via DCHECKs) that the requested string load kind matches the
// current compilation mode; all kinds are supported on MIPS64, so the request
// is returned unchanged.
HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  // Never set in this backend; kept for structural parity with backends that
  // do fall back to kRuntimeCall for unsupported kinds.
  bool fallback_load = false;
  switch (desired_string_load_kind) {
    // PC-relative / .bss kinds are only meaningful when AOT compiling.
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageRelRo:
    case HLoadString::LoadKind::kBssEntry:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    // Address-literal kinds are only meaningful when JIT compiling.
    case HLoadString::LoadKind::kJitBootImageAddress:
    case HLoadString::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kRuntimeCall:
      break;  // Always supported.
  }
  if (fallback_load) {
    desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
  }
  return desired_string_load_kind;
}
6017
// Sanity-checks (via DCHECKs) that the requested class load kind matches the
// current compilation mode; all valid kinds are supported on MIPS64, so the
// request is returned unchanged.
HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
    HLoadClass::LoadKind desired_class_load_kind) {
  // Never set in this backend; kept for structural parity with backends that
  // do fall back to kRuntimeCall for unsupported kinds.
  bool fallback_load = false;
  switch (desired_class_load_kind) {
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
    case HLoadClass::LoadKind::kReferrersClass:
      break;  // Always supported.
    // PC-relative / .bss kinds are only meaningful when AOT compiling.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageRelRo:
    case HLoadClass::LoadKind::kBssEntry:
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    // Address-literal kinds are only meaningful when JIT compiling.
    case HLoadClass::LoadKind::kJitBootImageAddress:
    case HLoadClass::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
      break;  // Always supported.
  }
  if (fallback_load) {
    desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
  }
  return desired_class_load_kind;
}
6044
Vladimir Markodc151b22015-10-15 18:02:30 +01006045HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
6046 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01006047 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Alexey Frunze19f6c692016-11-30 19:19:55 -08006048 // On MIPS64 we support all dispatch types.
6049 return desired_dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01006050}
6051
// Emits a static or direct call: first materialize the callee ArtMethod (or
// a direct entrypoint) according to the invoke's MethodLoadKind, then emit
// the call itself according to its CodePtrLocation. PC info is recorded for
// the call site so stack walks can map the return address back to the invoke.
void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Calling ourselves: the current method is already in a register,
      // passed as an extra input of the invoke.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      // PC-relative address of the method in the boot image, resolved at
      // link time via a high/low patch pair (auipc-style high + Daddiu low).
      DCHECK(GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          NewBootImageMethodPatch(invoke->GetTargetMethod());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          NewBootImageMethodPatch(invoke->GetTargetMethod(), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      // Load the method pointer from the .data.bimg.rel.ro section.
      uint32_t boot_image_offset = GetBootImageOffset(invoke);
      PcRelativePatchInfo* info_high = NewBootImageRelRoPatch(boot_image_offset);
      PcRelativePatchInfo* info_low = NewBootImageRelRoPatch(boot_image_offset, info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      __ Lwu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the resolved ArtMethod* from the oat file's .bss entry.
      PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
      PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kJitDirectAddress:
      // JIT: the method address is known now; embed it as a 64-bit literal.
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadDoubleword,
                     DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursion: branch-and-link directly to this method's frame entry.
      __ Balc(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9()
      __ Jalr(T9);
      __ Nop();  // NOP for the jalr branch delay slot.
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
6132
6133void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00006134 // Explicit clinit checks triggered by static invokes must have been pruned by
6135 // art::PrepareForRegisterAllocation.
6136 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006137
6138 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
6139 return;
6140 }
6141
6142 LocationSummary* locations = invoke->GetLocations();
6143 codegen_->GenerateStaticOrDirectCall(invoke,
6144 locations->HasTemps()
6145 ? locations->GetTemp(0)
6146 : Location::NoLocation());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006147}
6148
// Emits a virtual dispatch: load the receiver's class, index its embedded
// vtable by the invoke's vtable slot, and call the resulting ArtMethod's
// quick entrypoint through T9.
void CodeGeneratorMIPS64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  GpuRegister receiver = calling_convention.GetRegisterAt(0);

  GpuRegister temp = temp_location.AsRegister<GpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // temp = object->GetClass();
  __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
  // The class load above is the instruction that faults on a null receiver.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();  // NOP for the jalr branch delay slot.
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
6184
6185void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
6186 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
6187 return;
6188 }
6189
6190 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006191 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006192}
6193
// Builds the location summary for HLoadClass. kRuntimeCall gets the standard
// runtime-call summary; other kinds get a register output, plus slow-path
// caller-save tuning that depends on the read-barrier configuration.
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Input (dex type index carrier) and output share the first runtime-call register.
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // Boot-image classes never need a read barrier; everything else does when
  // read barriers are compiled in.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // kReferrersClass reads the declaring class off the current method (input 0).
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
6225
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
// Emits the class load for each supported HLoadClass::LoadKind, then a shared
// slow path for .bss null checks and/or class-initialization checks.
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  GpuRegister current_method_reg = ZERO;
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
  }

  // Boot-image classes are immune to moving GC, so no read barrier is needed.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      // PC-relative boot-image address via a link-time high/low patch pair.
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewBootImageTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewBootImageTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBootImageRelRo: {
      // Load the class pointer from the .data.bimg.rel.ro section (32-bit entry).
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      uint32_t boot_image_offset = codegen_->GetBootImageOffset(cls);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewBootImageRelRoPatch(boot_image_offset);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewBootImageRelRoPatch(boot_image_offset, info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Lwu(out, AT, /* placeholder */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // Load the GC root from the .bss entry; may still be null (unresolved),
      // hence the null check and resolution slow path below.
      CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high, out);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out,
                              /* placeholder */ 0x5678,
                              read_barrier_option,
                              &info_low->label);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitBootImageAddress: {
      // JIT: the boot-image class address is known now; embed it as a literal.
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      uint32_t address = reinterpret_cast32<uint32_t>(cls->GetClass().Get());
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress:
      // JIT: load the class GC root out of the JIT class table literal.
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
                                                          cls->GetTypeIndex(),
                                                          cls->GetClass()));
      GenerateGcRootFieldLoad(cls, out_loc, out, 0, read_barrier_option);
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS64* slow_path =
        new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(cls, cls);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      // Unresolved .bss entry: branch to the resolution slow path.
      __ Beqzc(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6335
Orion Hodsondbaa5c72018-05-10 08:22:46 +01006336void LocationsBuilderMIPS64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6337 InvokeRuntimeCallingConvention calling_convention;
6338 Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6339 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, loc, loc);
6340}
6341
6342void InstructionCodeGeneratorMIPS64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6343 codegen_->GenerateLoadMethodHandleRuntimeCall(load);
6344}
6345
Orion Hodson18259d72018-04-12 11:18:23 +01006346void LocationsBuilderMIPS64::VisitLoadMethodType(HLoadMethodType* load) {
6347 InvokeRuntimeCallingConvention calling_convention;
6348 Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6349 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, loc, loc);
6350}
6351
6352void InstructionCodeGeneratorMIPS64::VisitLoadMethodType(HLoadMethodType* load) {
6353 codegen_->GenerateLoadMethodTypeRuntimeCall(load);
6354}
6355
David Brazdilcb1c0552015-08-04 16:22:25 +01006356static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006357 return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
David Brazdilcb1c0552015-08-04 16:22:25 +01006358}
6359
Alexey Frunze4dda3372015-06-01 18:31:49 -07006360void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
6361 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006362 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006363 locations->SetOut(Location::RequiresRegister());
6364}
6365
6366void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
6367 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
David Brazdilcb1c0552015-08-04 16:22:25 +01006368 __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
6369}
6370
6371void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006372 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
David Brazdilcb1c0552015-08-04 16:22:25 +01006373}
6374
6375void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
6376 __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006377}
6378
// Builds the location summary for HLoadString. kRuntimeCall fixes the output
// to the first runtime-call register; other kinds get any register, with
// slow-path caller-save tuning for .bss entries depending on the read-barrier
// configuration.
void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
  if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
      } else {
        // For non-Baker read barriers we have a temp-clobbering call.
      }
    }
  }
}
6398
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006399// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6400// move.
6401void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006402 HLoadString::LoadKind load_kind = load->GetLoadKind();
6403 LocationSummary* locations = load->GetLocations();
6404 Location out_loc = locations->Out();
6405 GpuRegister out = out_loc.AsRegister<GpuRegister>();
6406
6407 switch (load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006408 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
6409 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006410 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006411 codegen_->NewBootImageStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006412 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
Vladimir Marko59eb30f2018-02-20 11:52:34 +00006413 codegen_->NewBootImageStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006414 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006415 __ Daddiu(out, AT, /* placeholder */ 0x5678);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006416 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006417 }
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006418 case HLoadString::LoadKind::kBootImageRelRo: {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006419 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006420 uint32_t boot_image_offset = codegen_->GetBootImageOffset(load);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006421 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006422 codegen_->NewBootImageRelRoPatch(boot_image_offset);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006423 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
Vladimir Markoe47f60c2018-02-21 13:43:28 +00006424 codegen_->NewBootImageRelRoPatch(boot_image_offset, info_high);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006425 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
6426 __ Lwu(out, AT, /* placeholder */ 0x5678);
6427 return;
6428 }
6429 case HLoadString::LoadKind::kBssEntry: {
6430 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6431 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
6432 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
6433 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6434 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Vladimir Markof3c52b42017-11-17 17:32:12 +00006435 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, out);
Alexey Frunze15958152017-02-09 19:08:30 -08006436 GenerateGcRootFieldLoad(load,
6437 out_loc,
Vladimir Markof3c52b42017-11-17 17:32:12 +00006438 out,
Alexey Frunze15958152017-02-09 19:08:30 -08006439 /* placeholder */ 0x5678,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006440 kCompilerReadBarrierOption,
6441 &info_low->label);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006442 SlowPathCodeMIPS64* slow_path =
Vladimir Markof3c52b42017-11-17 17:32:12 +00006443 new (codegen_->GetScopedAllocator()) LoadStringSlowPathMIPS64(load);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006444 codegen_->AddSlowPath(slow_path);
6445 __ Beqzc(out, slow_path->GetEntryLabel());
6446 __ Bind(slow_path->GetExitLabel());
6447 return;
6448 }
Vladimir Marko8e524ad2018-07-13 10:27:43 +01006449 case HLoadString::LoadKind::kJitBootImageAddress: {
6450 uint32_t address = reinterpret_cast32<uint32_t>(load->GetString().Get());
6451 DCHECK_NE(address, 0u);
6452 __ LoadLiteral(out,
6453 kLoadUnsignedWord,
6454 codegen_->DeduplicateBootImageAddressLiteral(address));
6455 return;
6456 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08006457 case HLoadString::LoadKind::kJitTableAddress:
6458 __ LoadLiteral(out,
6459 kLoadUnsignedWord,
6460 codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
6461 load->GetStringIndex(),
6462 load->GetString()));
Alexey Frunze15958152017-02-09 19:08:30 -08006463 GenerateGcRootFieldLoad(load, out_loc, out, 0, kCompilerReadBarrierOption);
Alexey Frunze627c1a02017-01-30 19:28:14 -08006464 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006465 default:
6466 break;
6467 }
6468
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006469 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006470 DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006471 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006472 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006473 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
6474 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6475 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006476}
6477
Alexey Frunze4dda3372015-06-01 18:31:49 -07006478void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006479 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006480 locations->SetOut(Location::ConstantLocation(constant));
6481}
6482
void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site; constants emit no standalone code.
}
6486
6487void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006488 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6489 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006490 InvokeRuntimeCallingConvention calling_convention;
6491 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6492}
6493
6494void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006495 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07006496 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01006497 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006498 if (instruction->IsEnter()) {
6499 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6500 } else {
6501 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6502 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006503}
6504
6505void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
6506 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006507 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006508 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006509 case DataType::Type::kInt32:
6510 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006511 locations->SetInAt(0, Location::RequiresRegister());
6512 locations->SetInAt(1, Location::RequiresRegister());
6513 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6514 break;
6515
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006516 case DataType::Type::kFloat32:
6517 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006518 locations->SetInAt(0, Location::RequiresFpuRegister());
6519 locations->SetInAt(1, Location::RequiresFpuRegister());
6520 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6521 break;
6522
6523 default:
6524 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
6525 }
6526}
6527
6528void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006529 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006530 LocationSummary* locations = instruction->GetLocations();
6531
6532 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006533 case DataType::Type::kInt32:
6534 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006535 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6536 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
6537 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006538 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006539 __ MulR6(dst, lhs, rhs);
6540 else
6541 __ Dmul(dst, lhs, rhs);
6542 break;
6543 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006544 case DataType::Type::kFloat32:
6545 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006546 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6547 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
6548 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006549 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006550 __ MulS(dst, lhs, rhs);
6551 else
6552 __ MulD(dst, lhs, rhs);
6553 break;
6554 }
6555 default:
6556 LOG(FATAL) << "Unexpected mul type " << type;
6557 }
6558}
6559
6560void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
6561 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006562 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006563 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006564 case DataType::Type::kInt32:
6565 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006566 locations->SetInAt(0, Location::RequiresRegister());
6567 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6568 break;
6569
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006570 case DataType::Type::kFloat32:
6571 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006572 locations->SetInAt(0, Location::RequiresFpuRegister());
6573 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6574 break;
6575
6576 default:
6577 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
6578 }
6579}
6580
6581void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006582 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006583 LocationSummary* locations = instruction->GetLocations();
6584
6585 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006586 case DataType::Type::kInt32:
6587 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006588 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6589 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006590 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006591 __ Subu(dst, ZERO, src);
6592 else
6593 __ Dsubu(dst, ZERO, src);
6594 break;
6595 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006596 case DataType::Type::kFloat32:
6597 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006598 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6599 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006600 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006601 __ NegS(dst, src);
6602 else
6603 __ NegD(dst, src);
6604 break;
6605 }
6606 default:
6607 LOG(FATAL) << "Unexpected neg type " << type;
6608 }
6609}
6610
6611void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006612 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6613 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006614 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006615 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006616 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6617 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006618}
6619
6620void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08006621 // Note: if heap poisoning is enabled, the entry point takes care
6622 // of poisoning the reference.
Goran Jakovljevic854df412017-06-27 14:41:39 +02006623 QuickEntrypointEnum entrypoint =
6624 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
6625 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006626 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Goran Jakovljevic854df412017-06-27 14:41:39 +02006627 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006628}
6629
6630void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006631 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6632 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006633 InvokeRuntimeCallingConvention calling_convention;
Alex Lightd109e302018-06-27 10:25:41 -07006634 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006635 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006636}
6637
6638void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
Alex Lightd109e302018-06-27 10:25:41 -07006639 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
6640 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006641}
6642
6643void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006644 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006645 locations->SetInAt(0, Location::RequiresRegister());
6646 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6647}
6648
6649void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006650 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006651 LocationSummary* locations = instruction->GetLocations();
6652
6653 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006654 case DataType::Type::kInt32:
6655 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006656 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6657 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6658 __ Nor(dst, src, ZERO);
6659 break;
6660 }
6661
6662 default:
6663 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
6664 }
6665}
6666
6667void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006668 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006669 locations->SetInAt(0, Location::RequiresRegister());
6670 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6671}
6672
6673void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
6674 LocationSummary* locations = instruction->GetLocations();
6675 __ Xori(locations->Out().AsRegister<GpuRegister>(),
6676 locations->InAt(0).AsRegister<GpuRegister>(),
6677 1);
6678}
6679
6680void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006681 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
6682 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006683}
6684
Calin Juravle2ae48182016-03-16 14:05:09 +00006685void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
6686 if (CanMoveNullCheckToUser(instruction)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006687 return;
6688 }
6689 Location obj = instruction->GetLocations()->InAt(0);
6690
6691 __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
Calin Juravle2ae48182016-03-16 14:05:09 +00006692 RecordPcInfo(instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006693}
6694
Calin Juravle2ae48182016-03-16 14:05:09 +00006695void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006696 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006697 new (GetScopedAllocator()) NullCheckSlowPathMIPS64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00006698 AddSlowPath(slow_path);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006699
6700 Location obj = instruction->GetLocations()->InAt(0);
6701
6702 __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
6703}
6704
void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Delegates to the codegen; see GenerateImplicitNullCheck and
  // GenerateExplicitNullCheck above for the two emission strategies.
  codegen_->GenerateNullCheck(instruction);
}
6708
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  // Bitwise OR uses the shared two-operand location rules.
  HandleBinaryOp(instruction);
}
6712
void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  // Bitwise OR is emitted by the shared binary-op helper.
  HandleBinaryOp(instruction);
}
6716
void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves are never routed through the locations builder.
  LOG(FATAL) << "Unreachable";
}
6720
6721void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01006722 if (instruction->GetNext()->IsSuspendCheck() &&
6723 instruction->GetBlock()->GetLoopInformation() != nullptr) {
6724 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
6725 // The back edge will generate the suspend check.
6726 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
6727 }
6728
Alexey Frunze4dda3372015-06-01 18:31:49 -07006729 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
6730}
6731
6732void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006733 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006734 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
6735 if (location.IsStackSlot()) {
6736 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6737 } else if (location.IsDoubleStackSlot()) {
6738 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6739 }
6740 locations->SetOut(location);
6741}
6742
void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location (set up by the
  // calling convention / locations builder above).
}
6747
6748void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
6749 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006750 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006751 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
6752}
6753
void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
                                                        ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location (the fixed method
  // register chosen by the locations builder).
}
6758
6759void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006760 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01006761 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006762 locations->SetInAt(i, Location::Any());
6763 }
6764 locations->SetOut(Location::Any());
6765}
6766
void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  // Phis generate no code of their own; reaching here is a compiler bug.
  LOG(FATAL) << "Unreachable";
}
6770
6771void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006772 DataType::Type type = rem->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006773 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006774 DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
6775 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01006776 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006777
6778 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006779 case DataType::Type::kInt32:
6780 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006781 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07006782 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006783 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6784 break;
6785
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006786 case DataType::Type::kFloat32:
6787 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006788 InvokeRuntimeCallingConvention calling_convention;
6789 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
6790 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
6791 locations->SetOut(calling_convention.GetReturnLocation(type));
6792 break;
6793 }
6794
6795 default:
6796 LOG(FATAL) << "Unexpected rem type " << type;
6797 }
6798}
6799
6800void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006801 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006802
6803 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006804 case DataType::Type::kInt32:
6805 case DataType::Type::kInt64:
Alexey Frunzec857c742015-09-23 15:12:39 -07006806 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006807 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006808
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006809 case DataType::Type::kFloat32:
6810 case DataType::Type::kFloat64: {
6811 QuickEntrypointEnum entrypoint =
6812 (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
Serban Constantinescufc734082016-07-19 17:18:07 +01006813 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006814 if (type == DataType::Type::kFloat32) {
Roland Levillain888d0672015-11-23 18:53:50 +00006815 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
6816 } else {
6817 CheckEntrypointTypes<kQuickFmod, double, double, double>();
6818 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006819 break;
6820 }
6821 default:
6822 LOG(FATAL) << "Unexpected rem type " << type;
6823 }
6824}
6825
Aart Bik1f8d51b2018-02-15 10:42:37 -08006826static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
6827 LocationSummary* locations = new (allocator) LocationSummary(minmax);
6828 switch (minmax->GetResultType()) {
6829 case DataType::Type::kInt32:
6830 case DataType::Type::kInt64:
6831 locations->SetInAt(0, Location::RequiresRegister());
6832 locations->SetInAt(1, Location::RequiresRegister());
6833 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6834 break;
6835 case DataType::Type::kFloat32:
6836 case DataType::Type::kFloat64:
6837 locations->SetInAt(0, Location::RequiresFpuRegister());
6838 locations->SetInAt(1, Location::RequiresFpuRegister());
6839 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6840 break;
6841 default:
6842 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
6843 }
6844}
6845
Aart Bik351df3e2018-03-07 11:54:57 -08006846void InstructionCodeGeneratorMIPS64::GenerateMinMaxInt(LocationSummary* locations, bool is_min) {
Aart Bik1f8d51b2018-02-15 10:42:37 -08006847 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
6848 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
6849 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
6850
6851 if (lhs == rhs) {
6852 if (out != lhs) {
6853 __ Move(out, lhs);
6854 }
6855 } else {
6856 // Some architectures, such as ARM and MIPS (prior to r6), have a
6857 // conditional move instruction which only changes the target
6858 // (output) register if the condition is true (MIPS prior to r6 had
6859 // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
6860 // change the target (output) register. If the condition is true the
6861 // output register gets the contents of the "rs" register; otherwise,
6862 // the output register is set to zero. One consequence of this is
6863 // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
6864 // needs to use a pair of SELEQZ/SELNEZ instructions. After
6865 // executing this pair of instructions one of the output registers
6866 // from the pair will necessarily contain zero. Then the code ORs the
6867 // output registers from the SELEQZ/SELNEZ instructions to get the
6868 // final result.
6869 //
6870 // The initial test to see if the output register is same as the
6871 // first input register is needed to make sure that value in the
6872 // first input register isn't clobbered before we've finished
6873 // computing the output value. The logic in the corresponding else
6874 // clause performs the same task but makes sure the second input
6875 // register isn't clobbered in the event that it's the same register
6876 // as the output register; the else clause also handles the case
6877 // where the output register is distinct from both the first, and the
6878 // second input registers.
6879 if (out == lhs) {
6880 __ Slt(AT, rhs, lhs);
6881 if (is_min) {
6882 __ Seleqz(out, lhs, AT);
6883 __ Selnez(AT, rhs, AT);
6884 } else {
6885 __ Selnez(out, lhs, AT);
6886 __ Seleqz(AT, rhs, AT);
6887 }
6888 } else {
6889 __ Slt(AT, lhs, rhs);
6890 if (is_min) {
6891 __ Seleqz(out, rhs, AT);
6892 __ Selnez(AT, lhs, AT);
6893 } else {
6894 __ Selnez(out, rhs, AT);
6895 __ Seleqz(AT, lhs, AT);
6896 }
6897 }
6898 __ Or(out, out, AT);
6899 }
6900}
6901
// Emits FP min/max with Java NaN semantics: if either operand is a NaN the
// result must be a NaN, whereas the bare MIPSR6 MIN.fmt/MAX.fmt instructions
// prefer the numeric operand. Hence the explicit unordered-compare prologue.
// Uses FTMP as scratch when the output register aliases an input.
void InstructionCodeGeneratorMIPS64::GenerateMinMaxFP(LocationSummary* locations,
                                                      bool is_min,
                                                      DataType::Type type) {
  FpuRegister a = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister b = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Mips64Label noNaNs;
  Mips64Label done;
  // Safe temporary: the output register if it aliases neither input, else FTMP.
  FpuRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

  // When Java computes min/max it prefers a NaN to a number; the
  // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
  // the inputs is a NaN and the other is a valid number, the MIPS
  // instruction will return the number; Java wants the NaN value
  // returned. This is why there is extra logic preceding the use of
  // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
  // NaN, return the NaN, otherwise return the min/max.
  if (type == DataType::Type::kFloat64) {
    __ CmpUnD(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqD(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelD(ftmp, a, b);

    if (ftmp != out) {
      __ MovD(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinD(out, a, b);
    } else {
      __ MaxD(out, a, b);
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ CmpUnS(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqS(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelS(ftmp, a, b);

    if (ftmp != out) {
      __ MovS(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinS(out, a, b);
    } else {
      __ MaxS(out, a, b);
    }
  }

  __ Bind(&done);
}
6969
Aart Bik351df3e2018-03-07 11:54:57 -08006970void InstructionCodeGeneratorMIPS64::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
6971 DataType::Type type = minmax->GetResultType();
6972 switch (type) {
6973 case DataType::Type::kInt32:
6974 case DataType::Type::kInt64:
6975 GenerateMinMaxInt(minmax->GetLocations(), is_min);
6976 break;
6977 case DataType::Type::kFloat32:
6978 case DataType::Type::kFloat64:
6979 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
6980 break;
6981 default:
6982 LOG(FATAL) << "Unexpected type for HMinMax " << type;
6983 }
6984}
6985
Aart Bik1f8d51b2018-02-15 10:42:37 -08006986void LocationsBuilderMIPS64::VisitMin(HMin* min) {
6987 CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
6988}
6989
void InstructionCodeGeneratorMIPS64::VisitMin(HMin* min) {
  // Min shares the common min/max code generation.
  GenerateMinMax(min, /*is_min*/ true);
}
6993
void LocationsBuilderMIPS64::VisitMax(HMax* max) {
  // Max shares the common min/max location rules.
  CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
}
6997
void InstructionCodeGeneratorMIPS64::VisitMax(HMax* max) {
  // Max shares the common min/max code generation.
  GenerateMinMax(max, /*is_min*/ false);
}
7001
Aart Bik3dad3412018-02-28 12:01:46 -08007002void LocationsBuilderMIPS64::VisitAbs(HAbs* abs) {
7003 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
7004 switch (abs->GetResultType()) {
7005 case DataType::Type::kInt32:
7006 case DataType::Type::kInt64:
7007 locations->SetInAt(0, Location::RequiresRegister());
7008 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7009 break;
7010 case DataType::Type::kFloat32:
7011 case DataType::Type::kFloat64:
7012 locations->SetInAt(0, Location::RequiresFpuRegister());
7013 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
7014 break;
7015 default:
7016 LOG(FATAL) << "Unexpected abs type " << abs->GetResultType();
7017 }
7018}
7019
void InstructionCodeGeneratorMIPS64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = abs->GetLocations();
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32: {
      GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      // Branchless abs: AT = in >> 31 (arithmetic) is all-ones for negative
      // inputs and zero otherwise; then |in| = (in ^ AT) - AT.
      __ Sra(AT, in, 31);
      __ Xor(out, in, AT);
      __ Subu(out, out, AT);
      break;
    }
    case DataType::Type::kInt64: {
      GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      // Same branchless trick for 64 bits: DSRA32 shifts by 32 + 31 = 63,
      // replicating the sign bit across AT.
      __ Dsra32(AT, in, 31);
      __ Xor(out, in, AT);
      __ Dsubu(out, out, AT);
      break;
    }
    case DataType::Type::kFloat32: {
      // Single FPU instruction clears the sign bit.
      FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      __ AbsS(out, in);
      break;
    }
    case DataType::Type::kFloat64: {
      FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      __ AbsD(out, in);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected abs type " << abs->GetResultType();
  }
}
7055
Igor Murashkind01745e2017-04-05 16:40:31 -07007056void LocationsBuilderMIPS64::VisitConstructorFence(HConstructorFence* constructor_fence) {
7057 constructor_fence->SetLocations(nullptr);
7058}
7059
void InstructionCodeGeneratorMIPS64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A constructor fence is emitted as a store-store barrier.
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
7064
Alexey Frunze4dda3372015-06-01 18:31:49 -07007065void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
7066 memory_barrier->SetLocations(nullptr);
7067}
7068
void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the barrier kind carried by the HIR node.
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
7072
void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(ret);
  DataType::Type return_type = ret->InputAt(0)->GetType();
  // Pin the returned value to the ABI-mandated return location for its type
  // (see Mips64ReturnLocation()).
  locations->SetInAt(0, Mips64ReturnLocation(return_type));
}

void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  // The value was constrained to the return location above, so only the
  // frame teardown remains to be emitted.
  codegen_->GenerateFrameExit();
}

// Void returns have no operand; only the frame exit is emitted.
void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
7090
// Rotations and shifts (ror/shl/shr) all share the HandleShift() helper for
// both location setup and code generation; subtraction goes through the
// generic HandleBinaryOp() helper.
void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}
7122
// Static field accesses share the instance-field helpers; the FieldInfo
// carries the offset/volatility details.
void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // GetValueCanBeNull() is forwarded so the helper can specialize for values
  // known to be non-null (NOTE(review): confirm the exact use in HandleFieldSet).
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
7138
// Unresolved field accesses (instance and static, get and set) cannot be
// compiled to direct loads/stores. All four variants delegate to shared
// CodeGenerator helpers, parameterized by the MIPS64 field-access calling
// convention, which set up the locations and emit the access.
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
7206
// A suspend check only calls into the runtime on the slow path, so it is
// marked kCallOnSlowPath rather than a full runtime call.
void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}
7216
7217void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
7218 HBasicBlock* block = instruction->GetBlock();
7219 if (block->GetLoopInformation() != nullptr) {
7220 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
7221 // The back edge will generate the suspend check.
7222 return;
7223 }
7224 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
7225 // The goto will generate the suspend check.
7226 return;
7227 }
7228 GenerateSuspendCheck(instruction, nullptr);
7229}
7230
void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  // The exception object is passed as the first (and only) runtime-call argument.
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
  // Delegate to the runtime's exception-delivery entrypoint.
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
7242
7243void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007244 DataType::Type input_type = conversion->GetInputType();
7245 DataType::Type result_type = conversion->GetResultType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01007246 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
7247 << input_type << " -> " << result_type;
Alexey Frunze4dda3372015-06-01 18:31:49 -07007248
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007249 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
7250 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07007251 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
7252 }
7253
Vladimir Markoca6fff82017-10-03 14:49:14 +01007254 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(conversion);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007255
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007256 if (DataType::IsFloatingPointType(input_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007257 locations->SetInAt(0, Location::RequiresFpuRegister());
7258 } else {
7259 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07007260 }
7261
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007262 if (DataType::IsFloatingPointType(result_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007263 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007264 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007265 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007266 }
7267}
7268
// Emits code for an explicit primitive type conversion:
//  - integral -> integral: mask (unsigned) or sign-extend (signed) in GPRs;
//  - integral -> FP: move to the FTMP scratch FPU register, then CVT;
//  - FP -> integral: TRUNC into FTMP, then move back to a GPR;
//  - FP -> FP: single CVT between precisions.
void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();

  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case DataType::Type::kUint8:
        // Zero-extend to 8 bits.
        __ Andi(dst, src, 0xFF);
        break;
      case DataType::Type::kInt8:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          __ Seb(dst, src);
        }
        break;
      case DataType::Type::kUint16:
        // Zero-extend to 16 bits.
        __ Andi(dst, src, 0xFFFF);
        break;
      case DataType::Type::kInt16:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          __ Seh(dst, src);
        }
        break;
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
        // conversions, except when the input and output registers are the same and we are not
        // converting longs to shorter types. In these cases, do nothing.
        if ((input_type == DataType::Type::kInt64) || (dst != src)) {
          __ Sll(dst, src, 0);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    if (input_type == DataType::Type::kInt64) {
      // Move the 64-bit integer into the FPU scratch register, then convert.
      __ Dmtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      // 32-bit integer source.
      __ Mtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
    CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();

    if (result_type == DataType::Type::kInt64) {
      // Truncating conversion to a 64-bit integer, via the FPU scratch register.
      if (input_type == DataType::Type::kFloat32) {
        __ TruncLS(FTMP, src);
      } else {
        __ TruncLD(FTMP, src);
      }
      __ Dmfc1(dst, FTMP);
    } else {
      // Truncating conversion to a 32-bit integer.
      if (input_type == DataType::Type::kFloat32) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);
    }
  } else if (DataType::IsFloatingPointType(result_type) &&
             DataType::IsFloatingPointType(input_type)) {
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    if (result_type == DataType::Type::kFloat32) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
7376
// Unsigned shift right shares HandleShift(); xor shares HandleBinaryOp().
void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

// All condition nodes — equality, signed comparisons (Lt/Le/Gt/Ge) and
// unsigned comparisons (Below/Above variants) — share HandleCondition()
// for both location setup and code generation.
void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
7482
// Simple implementation of packed switch - generate cascaded compare/jumps.
// Only the switch value needs a register; no output or temporaries.
void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}
7489
// Emits a packed switch as a cascade of compare-and-branch instructions.
// TMP holds (value - current_case_value); each loop iteration subtracts 2 and
// dispatches two consecutive case values with one BLTZC/BEQZC pair.
void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
                                                                 int32_t lower_bound,
                                                                 uint32_t num_entries,
                                                                 HBasicBlock* switch_block,
                                                                 HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
7525
// Emits a packed switch using a jump table. The table stores 32-bit offsets
// relative to its own start address; after a bounds check the target is
// loaded by index (DLSA scales by 4) and the absolute address is recomputed
// by adding the table base.
void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<Mips64Label*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  __ Addiu32(TMP, value_reg, -lower_bound);
  __ LoadConst32(AT, num_entries);
  __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, table->GetLabel());
  __ Dlsa(TMP, TMP, AT, 2);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Daddu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ Nop();
}
7556
7557void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7558 int32_t lower_bound = switch_instr->GetStartValue();
7559 uint32_t num_entries = switch_instr->GetNumEntries();
7560 LocationSummary* locations = switch_instr->GetLocations();
7561 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
7562 HBasicBlock* switch_block = switch_instr->GetBlock();
7563 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7564
7565 if (num_entries > kPackedSwitchJumpTableThreshold) {
7566 GenTableBasedPackedSwitch(value_reg,
7567 lower_bound,
7568 num_entries,
7569 switch_block,
7570 default_block);
7571 } else {
7572 GenPackedSwitchWithCompares(value_reg,
7573 lower_bound,
7574 num_entries,
7575 switch_block,
7576 default_block);
7577 }
7578}
7579
// A class-table get reads a method pointer out of a class object: input is
// the class reference, output is a fresh register.
void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
7586
void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Vtable: one load from the class's embedded vtable entry.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kMips64PointerSize).SizeValue();
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->InAt(0).AsRegister<GpuRegister>(),
                      method_offset);
  } else {
    // IMT: two loads — first the ImTable pointer from the class, then the
    // indexed entry from that table.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kMips64PointerSize));
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->InAt(0).AsRegister<GpuRegister>(),
                      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->Out().AsRegister<GpuRegister>(),
                      method_offset);
  }
}
7609
// HIntermediateAddress is not supported by this backend; it must never
// survive to code generation here.
void LocationsBuilderMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                      ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                              ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7619
Alexey Frunze4dda3372015-06-01 18:31:49 -07007620} // namespace mips64
7621} // namespace art