blob: 6657582e2ae3ad0fec259dd5862c4f56e1647221 [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunze4147fcc2017-06-17 19:57:27 -070019#include "arch/mips64/asm_support_mips64.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070020#include "art_method.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010021#include "class_table.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070022#include "code_generator_utils.h"
Alexey Frunze19f6c692016-11-30 19:19:55 -080023#include "compiled_method.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070024#include "entrypoints/quick/quick_entrypoints.h"
25#include "entrypoints/quick/quick_entrypoints_enum.h"
26#include "gc/accounting/card_table.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070027#include "heap_poisoning.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070028#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070029#include "intrinsics_mips64.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010030#include "linker/linker_patch.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070031#include "mirror/array-inl.h"
32#include "mirror/class-inl.h"
33#include "offsets.h"
Vladimir Marko174b2e22017-10-12 13:34:49 +010034#include "stack_map_stream.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070035#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070036#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070037#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070038#include "utils/stack_checks.h"
39
40namespace art {
41namespace mips64 {
42
// Offset from SP at which the current ArtMethod* is stored in the managed frame.
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry to a managed method (A0).
static constexpr GpuRegister kMethodRegisterArgument = A0;

// Flags controlling the use of thunks for Baker read barriers.
constexpr bool kBakerReadBarrierThunksEnableForFields = true;
constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
50
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010051Location Mips64ReturnLocation(DataType::Type return_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070052 switch (return_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010053 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +010054 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010055 case DataType::Type::kInt8:
56 case DataType::Type::kUint16:
57 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -080058 case DataType::Type::kUint32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010059 case DataType::Type::kInt32:
60 case DataType::Type::kReference:
Aart Bik66c158e2018-01-31 12:55:04 -080061 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010062 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070063 return Location::RegisterLocation(V0);
64
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010065 case DataType::Type::kFloat32:
66 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070067 return Location::FpuRegisterLocation(F0);
68
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010069 case DataType::Type::kVoid:
Alexey Frunze4dda3372015-06-01 18:31:49 -070070 return Location();
71 }
72 UNREACHABLE();
73}
74
// Returns the location holding a value of `type` returned from a dex (managed) call.
Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(DataType::Type type) const {
  return Mips64ReturnLocation(type);
}
78
// The callee's ArtMethod* is passed in kMethodRegisterArgument (A0).
Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
82
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010083Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(DataType::Type type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070084 Location next_location;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010085 if (type == DataType::Type::kVoid) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070086 LOG(FATAL) << "Unexpected parameter type " << type;
87 }
88
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010089 if (DataType::IsFloatingPointType(type) &&
Alexey Frunze4dda3372015-06-01 18:31:49 -070090 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
91 next_location = Location::FpuRegisterLocation(
92 calling_convention.GetFpuRegisterAt(float_index_++));
93 gp_index_++;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010094 } else if (!DataType::IsFloatingPointType(type) &&
Alexey Frunze4dda3372015-06-01 18:31:49 -070095 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
96 next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
97 float_index_++;
98 } else {
99 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100100 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
101 : Location::StackSlot(stack_offset);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700102 }
103
104 // Space on the stack is reserved for all arguments.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100105 stack_index_ += DataType::Is64BitType(type) ? 2 : 1;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700106
Alexey Frunze4dda3372015-06-01 18:31:49 -0700107 return next_location;
108}
109
// Runtime (quick entrypoint) calls return values in the same locations as
// dex calls; see Mips64ReturnLocation.
Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type type) {
  return Mips64ReturnLocation(type);
}
113
// Shorthand assembler macro used by the slow paths below: expands to the
// MIPS64 assembler of the `codegen` variable in the enclosing scope.
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700117
118class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
119 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000120 explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700121
122 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100123 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700124 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
125 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000126 if (instruction_->CanThrowIntoCatchBlock()) {
127 // Live registers will be restored in the catch block if caught.
128 SaveLiveRegisters(codegen, instruction_->GetLocations());
129 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700130 // We're moving two locations to locations that could overlap, so we need a parallel
131 // move resolver.
132 InvokeRuntimeCallingConvention calling_convention;
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100133 codegen->EmitParallelMoves(locations->InAt(0),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700134 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100135 DataType::Type::kInt32,
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100136 locations->InAt(1),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700137 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100138 DataType::Type::kInt32);
Serban Constantinescufc734082016-07-19 17:18:07 +0100139 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
140 ? kQuickThrowStringBounds
141 : kQuickThrowArrayBounds;
142 mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100143 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700144 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
145 }
146
Alexandre Rames8158f282015-08-07 10:26:17 +0100147 bool IsFatal() const OVERRIDE { return true; }
148
Roland Levillain46648892015-06-19 16:07:18 +0100149 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }
150
Alexey Frunze4dda3372015-06-01 18:31:49 -0700151 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700152 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
153};
154
155class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
156 public:
Alexey Frunzec61c0762017-04-10 13:54:23 -0700157 explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction)
158 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700159
160 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
161 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
162 __ Bind(GetEntryLabel());
Serban Constantinescufc734082016-07-19 17:18:07 +0100163 mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700164 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
165 }
166
Alexandre Rames8158f282015-08-07 10:26:17 +0100167 bool IsFatal() const OVERRIDE { return true; }
168
Roland Levillain46648892015-06-19 16:07:18 +0100169 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }
170
Alexey Frunze4dda3372015-06-01 18:31:49 -0700171 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700172 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
173};
174
// Slow path calling into the runtime to resolve a class, and optionally run
// its static initializer, when the fast path cannot.
class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `cls` is the class to load. `at` is the instruction this slow path
  // belongs to: either the HLoadClass itself, or an HClinitCheck (enforced
  // by the DCHECK below). `dex_pc` is recorded with the runtime call, and
  // `do_clinit` selects static-storage initialization over plain resolution.
  LoadClassSlowPathMIPS64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCodeMIPS64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    InvokeRuntimeCallingConvention calling_convention;
    // When this path hangs off the HLoadClass itself, `cls_` and the
    // instruction must agree.
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the type index in the first argument register.
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      mips64_codegen->MoveLocation(out,
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   type);
    }
    RestoreLiveRegisters(codegen, locations);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};
235
236class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
237 public:
Vladimir Markof3c52b42017-11-17 17:32:12 +0000238 explicit LoadStringSlowPathMIPS64(HLoadString* instruction)
239 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700240
241 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700242 DCHECK(instruction_->IsLoadString());
243 DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700244 LocationSummary* locations = instruction_->GetLocations();
245 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
Vladimir Markof3c52b42017-11-17 17:32:12 +0000246 const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700247 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700248 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700249 __ Bind(GetEntryLabel());
250 SaveLiveRegisters(codegen, locations);
251
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000252 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
Serban Constantinescufc734082016-07-19 17:18:07 +0100253 mips64_codegen->InvokeRuntime(kQuickResolveString,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700254 instruction_,
255 instruction_->GetDexPc(),
256 this);
257 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700258
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100259 DataType::Type type = instruction_->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700260 mips64_codegen->MoveLocation(locations->Out(),
Alexey Frunzec61c0762017-04-10 13:54:23 -0700261 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700262 type);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700263 RestoreLiveRegisters(codegen, locations);
Alexey Frunzef63f5692016-12-13 17:43:11 -0800264
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700265 __ Bc(GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700266 }
267
Roland Levillain46648892015-06-19 16:07:18 +0100268 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }
269
Alexey Frunze4dda3372015-06-01 18:31:49 -0700270 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700271 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
272};
273
274class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
275 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000276 explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700277
278 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
279 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
280 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000281 if (instruction_->CanThrowIntoCatchBlock()) {
282 // Live registers will be restored in the catch block if caught.
283 SaveLiveRegisters(codegen, instruction_->GetLocations());
284 }
Serban Constantinescufc734082016-07-19 17:18:07 +0100285 mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700286 instruction_,
287 instruction_->GetDexPc(),
288 this);
289 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
290 }
291
Alexandre Rames8158f282015-08-07 10:26:17 +0100292 bool IsFatal() const OVERRIDE { return true; }
293
Roland Levillain46648892015-06-19 16:07:18 +0100294 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }
295
Alexey Frunze4dda3372015-06-01 18:31:49 -0700296 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700297 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
298};
299
300class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
301 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100302 SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000303 : SlowPathCodeMIPS64(instruction), successor_(successor) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700304
305 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +0200306 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700307 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
308 __ Bind(GetEntryLabel());
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +0200309 SaveLiveRegisters(codegen, locations); // Only saves live vector registers for SIMD.
Serban Constantinescufc734082016-07-19 17:18:07 +0100310 mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700311 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +0200312 RestoreLiveRegisters(codegen, locations); // Only restores live vector registers for SIMD.
Alexey Frunze4dda3372015-06-01 18:31:49 -0700313 if (successor_ == nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700314 __ Bc(GetReturnLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700315 } else {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700316 __ Bc(mips64_codegen->GetLabelOf(successor_));
Alexey Frunze4dda3372015-06-01 18:31:49 -0700317 }
318 }
319
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700320 Mips64Label* GetReturnLabel() {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700321 DCHECK(successor_ == nullptr);
322 return &return_label_;
323 }
324
Roland Levillain46648892015-06-19 16:07:18 +0100325 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }
326
Chris Larsena2045912017-11-02 12:39:54 -0700327 HBasicBlock* GetSuccessor() const {
328 return successor_;
329 }
330
Alexey Frunze4dda3372015-06-01 18:31:49 -0700331 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700332 // If not null, the block to branch to after the suspend check.
333 HBasicBlock* const successor_;
334
335 // If `successor_` is null, the label to branch to after the suspend check.
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700336 Mips64Label return_label_;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700337
338 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
339};
340
// Slow path for HInstanceOf / HCheckCast type checks. When `is_fatal` is
// true (a check-cast that cannot be caught here), registers are not saved
// and control never returns to the fast path.
class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_) {
      // A fatal path never resumes, so saving registers would be wasted work.
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      // instanceof produces a value: move it from the return register to Out().
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      // check-cast either returns normally or throws; it has no result.
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ Bc(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // Whether this slow path can never fall back to the fast path.
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};
395
396class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
397 public:
Aart Bik42249c32016-01-07 15:33:50 -0800398 explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000399 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700400
401 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800402 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700403 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100404 LocationSummary* locations = instruction_->GetLocations();
405 SaveLiveRegisters(codegen, locations);
406 InvokeRuntimeCallingConvention calling_convention;
407 __ LoadConst32(calling_convention.GetRegisterAt(0),
408 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescufc734082016-07-19 17:18:07 +0100409 mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100410 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700411 }
412
Roland Levillain46648892015-06-19 16:07:18 +0100413 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }
414
Alexey Frunze4dda3372015-06-01 18:31:49 -0700415 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700416 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
417};
418
// Slow path for aput-object when the store needs a runtime type check:
// marshals (array, index, value) into the first three argument registers
// and calls the kQuickAputObject entrypoint.
class ArraySetSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit ArraySetSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // The three inputs may overlap the argument registers, so resolve all
    // moves simultaneously via a parallel move.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS64);
};
459
460// Slow path marking an object reference `ref` during a read
461// barrier. The field `obj.field` in the object `obj` holding this
462// reference does not get updated by this slow path after marking (see
463// ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 below for that).
464//
465// This means that after the execution of this slow path, `ref` will
466// always be up-to-date, but `obj.field` may not; i.e., after the
467// flip, `ref` will be a to-space reference, but `obj.field` will
468// probably still be a from-space reference (unless it gets updated by
469// another thread, or if another thread installed another object
470// reference (different from `ref`) in `obj.field`).
471//
472// If `entrypoint` is a valid location it is assumed to already be
473// holding the entrypoint. The case where the entrypoint is passed in
474// is for the GcRoot read barrier.
475class ReadBarrierMarkSlowPathMIPS64 : public SlowPathCodeMIPS64 {
476 public:
477 ReadBarrierMarkSlowPathMIPS64(HInstruction* instruction,
478 Location ref,
479 Location entrypoint = Location::NoLocation())
480 : SlowPathCodeMIPS64(instruction), ref_(ref), entrypoint_(entrypoint) {
481 DCHECK(kEmitCompilerReadBarrier);
482 }
483
484 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }
485
486 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
487 LocationSummary* locations = instruction_->GetLocations();
488 GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
489 DCHECK(locations->CanCall());
490 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
491 DCHECK(instruction_->IsInstanceFieldGet() ||
492 instruction_->IsStaticFieldGet() ||
493 instruction_->IsArrayGet() ||
494 instruction_->IsArraySet() ||
495 instruction_->IsLoadClass() ||
496 instruction_->IsLoadString() ||
497 instruction_->IsInstanceOf() ||
498 instruction_->IsCheckCast() ||
499 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
500 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
501 << "Unexpected instruction in read barrier marking slow path: "
502 << instruction_->DebugName();
503
504 __ Bind(GetEntryLabel());
505 // No need to save live registers; it's taken care of by the
506 // entrypoint. Also, there is no need to update the stack mask,
507 // as this runtime call will not trigger a garbage collection.
508 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
509 DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
510 (S2 <= ref_reg && ref_reg <= S7) ||
511 (ref_reg == S8)) << ref_reg;
512 // "Compact" slow path, saving two moves.
513 //
514 // Instead of using the standard runtime calling convention (input
515 // and output in A0 and V0 respectively):
516 //
517 // A0 <- ref
518 // V0 <- ReadBarrierMark(A0)
519 // ref <- V0
520 //
521 // we just use rX (the register containing `ref`) as input and output
522 // of a dedicated entrypoint:
523 //
524 // rX <- ReadBarrierMarkRegX(rX)
525 //
526 if (entrypoint_.IsValid()) {
527 mips64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
528 DCHECK_EQ(entrypoint_.AsRegister<GpuRegister>(), T9);
529 __ Jalr(entrypoint_.AsRegister<GpuRegister>());
530 __ Nop();
531 } else {
532 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100533 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800534 // This runtime call does not require a stack map.
535 mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
536 instruction_,
537 this);
538 }
539 __ Bc(GetExitLabel());
540 }
541
542 private:
543 // The location (register) of the marked object reference.
544 const Location ref_;
545
546 // The location of the entrypoint if already loaded.
547 const Location entrypoint_;
548
549 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS64);
550};
551
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathMIPS64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `ref` is the reference to mark; `obj`/`field_offset` identify the field
  // holding it; `temp1` preserves the old reference across the marking call.
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(HInstruction* instruction,
                                              Location ref,
                                              GpuRegister obj,
                                              Location field_offset,
                                              GpuRegister temp1)
      : SlowPathCodeMIPS64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    // The entrypoint selection below (indexed by `ref_reg - 1`) only works for
    // registers with a dedicated ReadBarrierMarkRegX entrypoint.
    DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == S8)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    Mips64Label done;
    __ Beqc(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    GpuRegister base = obj_;
    GpuRegister offset = field_offset_.AsRegister<GpuRegister>();
    GpuRegister expected = temp1_;
    GpuRegister value = ref_reg;
    GpuRegister tmp_ptr = TMP;      // Pointer to actual memory.
    GpuRegister tmp = AT;           // Value in memory.

    __ Daddu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    Mips64Label loop_head, exit_loop;
    __ Bind(&loop_head);
    __ Ll(tmp, tmp_ptr);
    // The LL instruction sign-extends the 32-bit value, but
    // 32-bit references must be zero-extended. Zero-extend `tmp`.
    __ Dext(tmp, tmp, 0, 32);
    __ Bnec(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    __ Sc(tmp, tmp_ptr);
    __ Beqzc(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ Bc(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const GpuRegister obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;
  // Scratch register used to preserve the old reference across the marking call.
  const GpuRegister temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS64);
};
708
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `out` receives the marked reference; `ref` is the unmarked reference read
  // from `obj` at `offset` (plus `index`, when valid, for arrays/intrinsics).
  ReadBarrierForHeapReferenceSlowPathMIPS64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCodeMIPS64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        GpuRegister index_reg = index_.AsRegister<GpuRegister>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips64::Mips64Assembler::Sll and
          // art::mips64::Mips64Assembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          GpuRegister free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathMIPS64";
  }

 private:
  // Returns a caller-save core register different from `ref_` and `obj_`
  // that is neither callee-save nor blocked; used to preserve `index_reg`.
  GpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<GpuRegister>());
    size_t obj = static_cast<int>(obj_.AsRegister<GpuRegister>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<GpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // The location the marked reference is written to.
  const Location out_;
  // The location of the unmarked reference that was loaded.
  const Location ref_;
  // The location of the object holding the reference.
  const Location obj_;
  // Static byte offset of the reference field within `obj_`.
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS64);
};
894
895// Slow path generating a read barrier for a GC root.
896class ReadBarrierForRootSlowPathMIPS64 : public SlowPathCodeMIPS64 {
897 public:
898 ReadBarrierForRootSlowPathMIPS64(HInstruction* instruction, Location out, Location root)
899 : SlowPathCodeMIPS64(instruction), out_(out), root_(root) {
900 DCHECK(kEmitCompilerReadBarrier);
901 }
902
903 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
904 LocationSummary* locations = instruction_->GetLocations();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100905 DataType::Type type = DataType::Type::kReference;
Alexey Frunze15958152017-02-09 19:08:30 -0800906 GpuRegister reg_out = out_.AsRegister<GpuRegister>();
907 DCHECK(locations->CanCall());
908 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
909 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
910 << "Unexpected instruction in read barrier for GC root slow path: "
911 << instruction_->DebugName();
912
913 __ Bind(GetEntryLabel());
914 SaveLiveRegisters(codegen, locations);
915
916 InvokeRuntimeCallingConvention calling_convention;
917 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
918 mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
919 root_,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100920 DataType::Type::kReference);
Alexey Frunze15958152017-02-09 19:08:30 -0800921 mips64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
922 instruction_,
923 instruction_->GetDexPc(),
924 this);
925 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
926 mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
927
928 RestoreLiveRegisters(codegen, locations);
929 __ Bc(GetExitLabel());
930 }
931
932 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS64"; }
933
934 private:
935 const Location out_;
936 const Location root_;
937
938 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS64);
939};
940
// Constructs the MIPS64 code generator. Registers the callee-save masks with
// the base CodeGenerator and allocates all patch/literal tables from the
// graph's arena allocator.
CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const Mips64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator(), &isa_features),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
978
979#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100980// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
981#define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700982#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700983
984void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700985 // Ensure that we fix up branches.
986 __ FinalizeCode();
987
988 // Adjust native pc offsets in stack maps.
Vladimir Marko174b2e22017-10-12 13:34:49 +0100989 StackMapStream* stack_map_stream = GetStackMapStream();
990 for (size_t i = 0, num = stack_map_stream->GetNumberOfStackMaps(); i != num; ++i) {
Mathieu Chartiera2f526f2017-01-19 14:48:48 -0800991 uint32_t old_position =
Vladimir Marko33bff252017-11-01 14:35:42 +0000992 stack_map_stream->GetStackMap(i).native_pc_code_offset.Uint32Value(InstructionSet::kMips64);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700993 uint32_t new_position = __ GetAdjustedPosition(old_position);
994 DCHECK_GE(new_position, old_position);
Vladimir Marko174b2e22017-10-12 13:34:49 +0100995 stack_map_stream->SetStackMapNativePcOffset(i, new_position);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700996 }
997
998 // Adjust pc offsets for the disassembly information.
999 if (disasm_info_ != nullptr) {
1000 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
1001 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
1002 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
1003 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
1004 it.second.start = __ GetAdjustedPosition(it.second.start);
1005 it.second.end = __ GetAdjustedPosition(it.second.end);
1006 }
1007 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
1008 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
1009 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
1010 }
1011 }
1012
Alexey Frunze4dda3372015-06-01 18:31:49 -07001013 CodeGenerator::Finalize(allocator);
1014}
1015
// Returns the assembler of the owning code generator.
Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}
1019
// Emits code for the move at position `index` in the resolved move list.
void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
}
1024
// Emits code swapping the two locations of the move at position `index`
// (used to break cycles in the parallel move graph).
void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
}
1029
// Reloads scratch register `reg` from the stack slot created by SpillScratch
// and releases that slot.
void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
  // Pop reg
  __ Ld(GpuRegister(reg), SP, 0);
  __ DecreaseFrameSize(kMips64DoublewordSize);
}
1035
// Saves scratch register `reg` into a freshly allocated doubleword stack slot.
void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
  // Push reg
  __ IncreaseFrameSize(kMips64DoublewordSize);
  __ Sd(GpuRegister(reg), SP, 0);
}
1041
1042void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
1043 LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
1044 StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
1045 // Allocate a scratch register other than TMP, if available.
1046 // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
1047 // automatically unspilled when the scratch scope object is destroyed).
1048 ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
1049 // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
Lazar Trsicd9672662015-09-03 17:33:01 +02001050 int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001051 __ LoadFromOffset(load_type,
1052 GpuRegister(ensure_scratch.GetRegister()),
1053 SP,
1054 index1 + stack_offset);
1055 __ LoadFromOffset(load_type,
1056 TMP,
1057 SP,
1058 index2 + stack_offset);
1059 __ StoreToOffset(store_type,
1060 GpuRegister(ensure_scratch.GetRegister()),
1061 SP,
1062 index2 + stack_offset);
1063 __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
1064}
1065
// Swaps two 128-bit (quadword) stack slots using the FTMP/FTMP2 scratch
// FPU registers.
void ParallelMoveResolverMIPS64::ExchangeQuadSlots(int index1, int index2) {
  __ LoadFpuFromOffset(kLoadQuadword, FTMP, SP, index1);
  __ LoadFpuFromOffset(kLoadQuadword, FTMP2, SP, index2);
  __ StoreFpuToOffset(kStoreQuadword, FTMP, SP, index2);
  __ StoreFpuToOffset(kStoreQuadword, FTMP2, SP, index1);
}
1072
// Maps a MIPS64 core register to its DWARF register number for CFI.
static dwarf::Reg DWARFReg(GpuRegister reg) {
  return dwarf::Reg::Mips64Core(static_cast<int>(reg));
}
1076
// Maps a MIPS64 floating-point register to its DWARF register number for CFI.
static dwarf::Reg DWARFReg(FpuRegister reg) {
  return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
}
Alexey Frunze4dda3372015-06-01 18:31:49 -07001080
// Emits the method prologue: optional stack-overflow probe, frame
// allocation, callee-save spills (with matching CFI records), storage of the
// current ArtMethod*, and initialization of the should_deoptimize flag.
void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    LOG(WARNING) << "Unimplemented hotness update in mips64 backend";
  }

  bool do_overflow_check =
      FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kMips64) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe the guard page below the reserved stack region; a load into ZERO
    // faults (raising StackOverflowError) if the stack is exhausted.
    __ LoadFromOffset(
        kLoadWord,
        ZERO,
        SP,
        -static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(InstructionSet::kMips64)) {
    LOG(FATAL) << "Stack frame larger than "
               << GetStackOverflowReservedBytes(InstructionSet::kMips64) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Core callee-saves are stored top-down from the high end of the frame.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // FPU callee-saves follow below the core ones.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreFpuToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1145
// Emits the method epilogue: restores callee-saved registers (with matching
// CFI records), releases the frame, and returns via RA.
void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFpuFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    __ DecreaseFrameSize(GetFrameSize());
  }

  // Return to the caller (compact jump through RA).
  __ Jic(RA, 0);

  // The epilogue CFI adjustments apply only to this exit; restore the state
  // for any code emitted after it.
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1180
// Binds the label associated with `block` at the current assembler position.
void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1184
1185void CodeGeneratorMIPS64::MoveLocation(Location destination,
1186 Location source,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001187 DataType::Type dst_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001188 if (source.Equals(destination)) {
1189 return;
1190 }
1191
1192 // A valid move can always be inferred from the destination and source
1193 // locations. When moving from and to a register, the argument type can be
1194 // used to generate 32bit instead of 64bit moves.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001195 bool unspecified_type = (dst_type == DataType::Type::kVoid);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001196 DCHECK_EQ(unspecified_type, false);
1197
1198 if (destination.IsRegister() || destination.IsFpuRegister()) {
1199 if (unspecified_type) {
1200 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1201 if (source.IsStackSlot() ||
1202 (src_cst != nullptr && (src_cst->IsIntConstant()
1203 || src_cst->IsFloatConstant()
1204 || src_cst->IsNullConstant()))) {
1205 // For stack slots and 32bit constants, a 64bit type is appropriate.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001206 dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001207 } else {
1208 // If the source is a double stack slot or a 64bit constant, a 64bit
1209 // type is appropriate. Else the source is a register, and since the
1210 // type has not been specified, we chose a 64bit type to force a 64bit
1211 // move.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001212 dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001213 }
1214 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001215 DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
1216 (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001217 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1218 // Move to GPR/FPR from stack
1219 LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001220 if (DataType::IsFloatingPointType(dst_type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001221 __ LoadFpuFromOffset(load_type,
1222 destination.AsFpuRegister<FpuRegister>(),
1223 SP,
1224 source.GetStackIndex());
1225 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001226 // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
Alexey Frunze4dda3372015-06-01 18:31:49 -07001227 __ LoadFromOffset(load_type,
1228 destination.AsRegister<GpuRegister>(),
1229 SP,
1230 source.GetStackIndex());
1231 }
Lena Djokicca8c2952017-05-29 11:31:46 +02001232 } else if (source.IsSIMDStackSlot()) {
1233 __ LoadFpuFromOffset(kLoadQuadword,
1234 destination.AsFpuRegister<FpuRegister>(),
1235 SP,
1236 source.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001237 } else if (source.IsConstant()) {
1238 // Move to GPR/FPR from constant
1239 GpuRegister gpr = AT;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001240 if (!DataType::IsFloatingPointType(dst_type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001241 gpr = destination.AsRegister<GpuRegister>();
1242 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001243 if (dst_type == DataType::Type::kInt32 || dst_type == DataType::Type::kFloat32) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001244 int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001245 if (DataType::IsFloatingPointType(dst_type) && value == 0) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001246 gpr = ZERO;
1247 } else {
1248 __ LoadConst32(gpr, value);
1249 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001250 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001251 int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001252 if (DataType::IsFloatingPointType(dst_type) && value == 0) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001253 gpr = ZERO;
1254 } else {
1255 __ LoadConst64(gpr, value);
1256 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001257 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001258 if (dst_type == DataType::Type::kFloat32) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001259 __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001260 } else if (dst_type == DataType::Type::kFloat64) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001261 __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
1262 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001263 } else if (source.IsRegister()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001264 if (destination.IsRegister()) {
1265 // Move to GPR from GPR
1266 __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
1267 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001268 DCHECK(destination.IsFpuRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001269 if (DataType::Is64BitType(dst_type)) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001270 __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
1271 } else {
1272 __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
1273 }
1274 }
1275 } else if (source.IsFpuRegister()) {
1276 if (destination.IsFpuRegister()) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001277 if (GetGraph()->HasSIMD()) {
1278 __ MoveV(VectorRegisterFrom(destination),
1279 VectorRegisterFrom(source));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001280 } else {
Lena Djokicca8c2952017-05-29 11:31:46 +02001281 // Move to FPR from FPR
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001282 if (dst_type == DataType::Type::kFloat32) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001283 __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1284 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001285 DCHECK_EQ(dst_type, DataType::Type::kFloat64);
Lena Djokicca8c2952017-05-29 11:31:46 +02001286 __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1287 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001288 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001289 } else {
1290 DCHECK(destination.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001291 if (DataType::Is64BitType(dst_type)) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001292 __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1293 } else {
1294 __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1295 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001296 }
1297 }
Lena Djokicca8c2952017-05-29 11:31:46 +02001298 } else if (destination.IsSIMDStackSlot()) {
1299 if (source.IsFpuRegister()) {
1300 __ StoreFpuToOffset(kStoreQuadword,
1301 source.AsFpuRegister<FpuRegister>(),
1302 SP,
1303 destination.GetStackIndex());
1304 } else {
1305 DCHECK(source.IsSIMDStackSlot());
1306 __ LoadFpuFromOffset(kLoadQuadword,
1307 FTMP,
1308 SP,
1309 source.GetStackIndex());
1310 __ StoreFpuToOffset(kStoreQuadword,
1311 FTMP,
1312 SP,
1313 destination.GetStackIndex());
1314 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001315 } else { // The destination is not a register. It must be a stack slot.
1316 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1317 if (source.IsRegister() || source.IsFpuRegister()) {
1318 if (unspecified_type) {
1319 if (source.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001320 dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001321 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001322 dst_type =
1323 destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001324 }
1325 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001326 DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
1327 (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001328 // Move to stack from GPR/FPR
1329 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
1330 if (source.IsRegister()) {
1331 __ StoreToOffset(store_type,
1332 source.AsRegister<GpuRegister>(),
1333 SP,
1334 destination.GetStackIndex());
1335 } else {
1336 __ StoreFpuToOffset(store_type,
1337 source.AsFpuRegister<FpuRegister>(),
1338 SP,
1339 destination.GetStackIndex());
1340 }
1341 } else if (source.IsConstant()) {
1342 // Move to stack from constant
1343 HConstant* src_cst = source.GetConstant();
1344 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001345 GpuRegister gpr = ZERO;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001346 if (destination.IsStackSlot()) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001347 int32_t value = GetInt32ValueOf(src_cst->AsConstant());
1348 if (value != 0) {
1349 gpr = TMP;
1350 __ LoadConst32(gpr, value);
1351 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001352 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001353 DCHECK(destination.IsDoubleStackSlot());
1354 int64_t value = GetInt64ValueOf(src_cst->AsConstant());
1355 if (value != 0) {
1356 gpr = TMP;
1357 __ LoadConst64(gpr, value);
1358 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001359 }
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001360 __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001361 } else {
1362 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
1363 DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
1364 // Move to stack from stack
1365 if (destination.IsStackSlot()) {
1366 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1367 __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
1368 } else {
1369 __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
1370 __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
1371 }
1372 }
1373 }
1374}
1375
// Swaps the contents of two locations (GPRs, FPRs, stack slots or SIMD stack
// slots) using TMP/FTMP as scratch. `type` selects single vs. double precision
// for FPR<->FPR swaps; constants cannot be swapped.
void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, DataType::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  // Classify both locations once; the branches below dispatch on these flags.
  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_simd1 = loc1.IsSIMDStackSlot();
  bool is_simd2 = loc2.IsSIMDStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs.
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs.
    if (GetGraph()->HasSIMD()) {
      // With SIMD, always move the full vector register regardless of `type`.
      __ MoveV(static_cast<VectorRegister>(FTMP), VectorRegisterFrom(loc1));
      __ MoveV(VectorRegisterFrom(loc1), VectorRegisterFrom(loc2));
      __ MoveV(VectorRegisterFrom(loc2), static_cast<VectorRegister>(FTMP));
    } else {
      FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
      FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
      if (type == DataType::Type::kFloat32) {
        __ MovS(FTMP, r1);
        __ MovS(r1, r2);
        __ MovS(r2, FTMP);
      } else {
        DCHECK_EQ(type, DataType::Type::kFloat64);
        __ MovD(FTMP, r1);
        __ MovD(r1, r2);
        __ MovD(r2, FTMP);
      }
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot: load the slot into TMP, store the register
    // into the slot, then move TMP into the register.
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    // Stack slot <-> stack slot: delegate to the parallel move resolver.
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else if (is_simd1 && is_simd2) {
    // SIMD slot <-> SIMD slot: 128-bit exchange via the move resolver.
    move_resolver_.ExchangeQuadSlots(loc1.GetStackIndex(), loc2.GetStackIndex());
  } else if ((is_fp_reg1 && is_simd2) || (is_fp_reg2 && is_simd1)) {
    // FPR <-> SIMD stack slot: quadword load/store through FTMP.
    Location fp_reg_loc = is_fp_reg1 ? loc1 : loc2;
    Location mem_loc = is_fp_reg1 ? loc2 : loc1;
    __ LoadFpuFromOffset(kLoadQuadword, FTMP, SP, mem_loc.GetStackIndex());
    __ StoreFpuToOffset(kStoreQuadword,
                        fp_reg_loc.AsFpuRegister<FpuRegister>(),
                        SP,
                        mem_loc.GetStackIndex());
    __ MoveV(VectorRegisterFrom(fp_reg_loc), static_cast<VectorRegister>(FTMP));
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}
1460
Calin Juravle175dc732015-08-25 15:42:32 +01001461void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
1462 DCHECK(location.IsRegister());
1463 __ LoadConst32(location.AsRegister<GpuRegister>(), value);
1464}
1465
Calin Juravlee460d1d2015-09-29 04:52:17 +01001466void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1467 if (location.IsRegister()) {
1468 locations->AddTemp(location);
1469 } else {
1470 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1471 }
1472}
1473
// GC write barrier: marks the card covering `object` as dirty after a
// reference store of `value` into it. If `value_can_be_null`, a null store
// skips the marking entirely.
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    __ Beqzc(value, &done);
  }
  // Load the biased card-table base from the Thread register.
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  // card + (object >> kCardShift) addresses the card for `object`.
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  // Store the low byte of `card` as the dirty value — presumably the runtime
  // biases the table base so its low byte equals the dirty-card marker
  // (NOTE(review): confirm against gc::accounting::CardTable).
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1494
// Converts a deque of recorded PC-relative patch infos into linker patches
// using the given LinkerPatch `Factory`. For "low" patches, the PC-relative
// anchor is taken from the linked "high" patch's label.
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.label.IsBound());
    // Code offset of the instruction to patch.
    uint32_t literal_offset = __ GetLabelLocation(&info.label);
    // A patch without a high half anchors at its own label.
    const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
    uint32_t pc_rel_offset = __ GetLabelLocation(&info_high.label);
    linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}
1509
// Emits all recorded linker patches. Boot-image compiles get relative
// method/type/string patches; app compiles get class-table/intern-table
// patches instead (and no direct method patches). .bss entry patches are
// emitted in both configurations.
void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      pc_relative_method_patches_.size() +
      method_bss_entry_patches_.size() +
      pc_relative_type_patches_.size() +
      type_bss_entry_patches_.size() +
      pc_relative_string_patches_.size() +
      string_bss_entry_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        pc_relative_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        pc_relative_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        pc_relative_string_patches_, linker_patches);
  } else {
    DCHECK(pc_relative_method_patches_.empty());
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
        pc_relative_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
        pc_relative_string_patches_, linker_patches);
  }
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  // Every reserved patch must have been emitted.
  DCHECK_EQ(size, linker_patches->size());
}
1542
// Records a PC-relative patch for a boot-image method reference; `info_high`
// links a low patch to its AUIPC anchor (nullptr for the high patch itself).
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeMethodPatch(
    MethodReference target_method,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(*target_method.dex_file,
                            target_method.index,
                            info_high,
                            &pc_relative_method_patches_);
}
1551
// Records a PC-relative patch for a method's .bss entry (app compiles).
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewMethodBssEntryPatch(
    MethodReference target_method,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(*target_method.dex_file,
                            target_method.index,
                            info_high,
                            &method_bss_entry_patches_);
}
1560
// Records a PC-relative patch for a type reference.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeTypePatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
}
1567
// Records a PC-relative patch for a type's .bss entry.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
}
1574
// Records a PC-relative patch for a string reference.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStringPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
}
1581
// Records a PC-relative patch for a string's .bss entry.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewStringBssEntryPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
}
1588
// Appends a new patch record to `patches` and returns a pointer to it.
// ArenaDeque does not relocate existing elements on emplace_back, so the
// returned pointer stays valid as more patches are added.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
    const DexFile& dex_file,
    uint32_t offset_or_index,
    const PcRelativePatchInfo* info_high,
    ArenaDeque<PcRelativePatchInfo>* patches) {
  patches->emplace_back(dex_file, offset_or_index, info_high);
  return &patches->back();
}
1597
Alexey Frunzef63f5692016-12-13 17:43:11 -08001598Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1599 return map->GetOrCreate(
1600 value,
1601 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1602}
1603
Alexey Frunze19f6c692016-11-30 19:19:55 -08001604Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
1605 return uint64_literals_.GetOrCreate(
1606 value,
1607 [this, value]() { return __ NewLiteral<uint64_t>(value); });
1608}
1609
Alexey Frunzef63f5692016-12-13 17:43:11 -08001610Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001611 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001612}
1613
// Emits the AUIPC half of a PC-relative address computation, binding the
// high (and optionally low) patch labels so the linker can fix up the
// placeholder offsets later.
void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                               GpuRegister out,
                                                               PcRelativePatchInfo* info_low) {
  // `info_high` must itself be a high patch, not a low one.
  DCHECK(!info_high->patch_info_high);
  __ Bind(&info_high->label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
  if (info_low != nullptr) {
    DCHECK_EQ(info_low->patch_info_high, info_high);
    // Bind the low patch at the position of that following instruction.
    __ Bind(&info_low->label);
  }
}
1628
// Reserves a JIT GC-root slot for the string and returns a deduplicated
// placeholder literal; the actual root address is patched in later by
// PatchJitRootUse().
Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
                                                          dex::StringIndex string_index,
                                                          Handle<mirror::String> handle) {
  ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
  return jit_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
1637
// Reserves a JIT GC-root slot for the class and returns a deduplicated
// placeholder literal; the actual root address is patched in later by
// PatchJitRootUse().
Literal* CodeGeneratorMIPS64::DeduplicateJitClassLiteral(const DexFile& dex_file,
                                                         dex::TypeIndex type_index,
                                                         Handle<mirror::Class> handle) {
  ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
  return jit_class_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
1646
1647void CodeGeneratorMIPS64::PatchJitRootUse(uint8_t* code,
1648 const uint8_t* roots_data,
1649 const Literal* literal,
1650 uint64_t index_in_table) const {
1651 uint32_t literal_offset = GetAssembler().GetLabelLocation(literal->GetLabel());
1652 uintptr_t address =
1653 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
1654 reinterpret_cast<uint32_t*>(code + literal_offset)[0] = dchecked_integral_cast<uint32_t>(address);
1655}
1656
1657void CodeGeneratorMIPS64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1658 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001659 const StringReference& string_reference = entry.first;
1660 Literal* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01001661 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001662 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001663 }
1664 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001665 const TypeReference& type_reference = entry.first;
1666 Literal* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01001667 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001668 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001669 }
1670}
1671
// Marks registers that the register allocator must never hand out:
// architectural/reserved GPRs, scratch registers, the suspend and thread
// registers, and (conditionally) MSA scratch and FP callee-saves.
void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
  // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
  blocked_core_registers_[ZERO] = true;
  blocked_core_registers_[K0] = true;
  blocked_core_registers_[K1] = true;
  blocked_core_registers_[GP] = true;
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[RA] = true;

  // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
  // registers (similar to how AT is used by MIPS assemblers).
  blocked_core_registers_[AT] = true;
  blocked_core_registers_[TMP] = true;
  blocked_core_registers_[TMP2] = true;
  blocked_fpu_registers_[FTMP] = true;

  if (GetInstructionSetFeatures().HasMsa()) {
    // To be used just for MSA instructions.
    blocked_fpu_registers_[FTMP2] = true;
  }

  // Reserve suspend and thread registers.
  blocked_core_registers_[S0] = true;
  blocked_core_registers_[TR] = true;

  // Reserve T9 for function calls
  blocked_core_registers_[T9] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}
1709
Alexey Frunze4dda3372015-06-01 18:31:49 -07001710size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1711 __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001712 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001713}
1714
1715size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1716 __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001717 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001718}
1719
1720size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001721 __ StoreFpuToOffset(GetGraph()->HasSIMD() ? kStoreQuadword : kStoreDoubleword,
1722 FpuRegister(reg_id),
1723 SP,
1724 stack_index);
1725 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001726}
1727
1728size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001729 __ LoadFpuFromOffset(GetGraph()->HasSIMD() ? kLoadQuadword : kLoadDoubleword,
1730 FpuRegister(reg_id),
1731 SP,
1732 stack_index);
1733 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001734}
1735
1736void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +01001737 stream << GpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001738}
1739
1740void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +01001741 stream << FpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001742}
1743
// Calls the runtime entrypoint and, when the entrypoint can trigger stack
// walking, records a stack map at `dex_pc`.
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1754
// Calls a runtime entrypoint by raw thread offset without emitting a stack
// map (for entrypoints that cannot cause a stack walk).
void CodeGeneratorMIPS64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1761
// Loads the entrypoint address from the Thread register into T9 (the MIPS
// call register) and calls it; the Nop fills the Jalr delay slot.
void CodeGeneratorMIPS64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
  __ Jalr(T9);
  __ Nop();
}
1767
// Branches to `slow_path` unless the class in `class_reg` is initialized.
// The class status is packed above the SubtypeCheckBits bitstruct inside the
// status word, so only the byte containing the status is loaded and compared.
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);

  __ LoadFromOffset(kLoadUnsignedByte, TMP, class_reg, status_byte_offset);
  __ LoadConst32(AT, shifted_initialized_value);
  // Statuses below kInitialized need the slow path (unsigned compare).
  __ Bltuc(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1783
// Emits a full memory barrier; the requested `kind` is ignored because this
// implementation always uses SYNC with stype 0.
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}
1787
// Emits a thread-suspension test. Reuses the slow path cached on the
// instruction (one HSuspendCheck can be reached through several back edges)
// or allocates and registers a new one.
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      down_cast<SuspendCheckSlowPathMIPS64*>(instruction->GetSlowPath());

  if (slow_path == nullptr) {
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathMIPS64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      // A non-null successor means this check guards a loop back edge.
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Test the thread flags halfword; non-zero means suspension was requested.
  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    // Fall-through form: branch to the slow path only when flags are set.
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Branch form: skip the slow path when no suspension is requested.
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1818
// Caches the shared assembler and code generator for instruction visiting.
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1824
// Chooses operand locations for Add/Sub/And/Or/Xor. Integer ops may take the
// right-hand side as an inlined constant when the code emitter can encode it
// efficiently (see InstructionCodeGeneratorMIPS64::HandleBinaryOp); otherwise
// both inputs get core registers. FP ops always use FPU registers.
void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DCHECK_EQ(instruction->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  DataType::Type type = instruction->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      HInstruction* right = instruction->InputAt(1);
      bool can_use_imm = false;
      if (right->IsConstant()) {
        int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
        if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
          // Andi/Ori/Xori take a 16-bit zero-extended immediate.
          can_use_imm = IsUint<16>(imm);
        } else {
          DCHECK(instruction->IsAdd() || instruction->IsSub());
          bool single_use = right->GetUses().HasExactlyOneElement();
          if (instruction->IsSub()) {
            // A subtraction of a constant is emitted as an addition of its
            // negation; kInt32 INT32_MIN is deliberately not negated here.
            if (!(type == DataType::Type::kInt32 && imm == INT32_MIN)) {
              imm = -imm;
            }
          }
          // Allow the constant when it fits one Addiu/Daddiu (16-bit signed),
          // one Aui/Daui (low half zero), or when it has a single use, in
          // which case materializing it inline at the use is acceptable.
          if (type == DataType::Type::kInt32) {
            can_use_imm = IsInt<16>(imm) || (Low16Bits(imm) == 0) || single_use;
          } else {
            can_use_imm = IsInt<16>(imm) || (IsInt<32>(imm) && (Low16Bits(imm) == 0)) || single_use;
          }
        }
      }
      if (can_use_imm)
        locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
      else
        locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
    }
    break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
  }
}
1873
// Emits code for Add/Sub/And/Or/Xor using the locations chosen by
// LocationsBuilderMIPS64::HandleBinaryOp. For integer Add/Sub with a constant
// right-hand side, the constant is folded into the instruction stream, using
// up to a Daddiu/Daui/Dahi/Dati sequence for full 64-bit immediates.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd() || instruction->IsSub()) {
        // Subtraction of an immediate is emitted as addition of its negation.
        if (instruction->IsSub()) {
          rhs_imm = -rhs_imm;
        }
        if (type == DataType::Type::kInt32) {
          if (use_imm) {
            if (IsInt<16>(rhs_imm)) {
              __ Addiu(dst, lhs, rhs_imm);
            } else {
              // 32-bit immediate: Aui adds the high half, Addiu the low half.
              // Addiu sign-extends its operand, so when the low half is
              // negative the high half is pre-incremented to compensate.
              int16_t rhs_imm_high = High16Bits(rhs_imm);
              int16_t rhs_imm_low = Low16Bits(rhs_imm);
              if (rhs_imm_low < 0) {
                rhs_imm_high += 1;
              }
              __ Aui(dst, lhs, rhs_imm_high);
              if (rhs_imm_low != 0) {
                __ Addiu(dst, dst, rhs_imm_low);
              }
            }
          } else {
            if (instruction->IsAdd()) {
              __ Addu(dst, lhs, rhs_reg);
            } else {
              DCHECK(instruction->IsSub());
              __ Subu(dst, lhs, rhs_reg);
            }
          }
        } else {
          if (use_imm) {
            if (IsInt<16>(rhs_imm)) {
              __ Daddiu(dst, lhs, rhs_imm);
            } else if (IsInt<32>(rhs_imm)) {
              // 32-bit immediate, 64-bit add: same high/low split as above,
              // but if incrementing the high half wrapped to -32768 the carry
              // must be propagated into bits 47..32 with Dahi.
              int16_t rhs_imm_high = High16Bits(rhs_imm);
              int16_t rhs_imm_low = Low16Bits(rhs_imm);
              bool overflow_hi16 = false;
              if (rhs_imm_low < 0) {
                rhs_imm_high += 1;
                overflow_hi16 = (rhs_imm_high == -32768);
              }
              __ Daui(dst, lhs, rhs_imm_high);
              if (rhs_imm_low != 0) {
                __ Daddiu(dst, dst, rhs_imm_low);
              }
              if (overflow_hi16) {
                __ Dahi(dst, 1);
              }
            } else {
              // Full 64-bit immediate: add it 16 bits at a time with
              // Daddiu/Daui/Dahi/Dati. Each instruction sign-extends its
              // 16-bit chunk, so a negative chunk is compensated by adding
              // 1 << 16 / 1 << 32 / 1 << 48 into the next-higher chunk.
              int16_t rhs_imm_low = Low16Bits(Low32Bits(rhs_imm));
              if (rhs_imm_low < 0) {
                rhs_imm += (INT64_C(1) << 16);
              }
              int16_t rhs_imm_upper = High16Bits(Low32Bits(rhs_imm));
              if (rhs_imm_upper < 0) {
                rhs_imm += (INT64_C(1) << 32);
              }
              int16_t rhs_imm_high = Low16Bits(High32Bits(rhs_imm));
              if (rhs_imm_high < 0) {
                rhs_imm += (INT64_C(1) << 48);
              }
              int16_t rhs_imm_top = High16Bits(High32Bits(rhs_imm));
              GpuRegister tmp = lhs;
              if (rhs_imm_low != 0) {
                __ Daddiu(dst, tmp, rhs_imm_low);
                tmp = dst;
              }
              // Dahi and Dati must use the same input and output register, so we have to initialize
              // the dst register using Daddiu or Daui, even when the intermediate value is zero:
              // Daui(dst, lhs, 0).
              if ((rhs_imm_upper != 0) || (rhs_imm_low == 0)) {
                __ Daui(dst, tmp, rhs_imm_upper);
              }
              if (rhs_imm_high != 0) {
                __ Dahi(dst, rhs_imm_high);
              }
              if (rhs_imm_top != 0) {
                __ Dati(dst, rhs_imm_top);
              }
            }
          } else if (instruction->IsAdd()) {
            __ Daddu(dst, lhs, rhs_reg);
          } else {
            DCHECK(instruction->IsSub());
            __ Dsubu(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == DataType::Type::kFloat32)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == DataType::Type::kFloat32)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
2021
2022void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002023 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002024
Vladimir Markoca6fff82017-10-03 14:49:14 +01002025 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002026 DataType::Type type = instr->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002027 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002028 case DataType::Type::kInt32:
2029 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002030 locations->SetInAt(0, Location::RequiresRegister());
2031 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002032 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002033 break;
2034 }
2035 default:
2036 LOG(FATAL) << "Unexpected shift type " << type;
2037 }
2038}
2039
// Emits code for Shl/Shr/UShr/Ror on kInt32/kInt64 values. Constant shift
// distances are masked to the type's width; 64-bit constant shifts of 32..63
// use the "32" instruction variants, which encode distance - 32.
void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  DataType::Type type = instr->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        // Mask the constant distance with kMaxIntShiftDistance or
        // kMaxLongShiftDistance, matching Java shift semantics.
        uint32_t shift_value = rhs_imm &
            (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          // Shift by zero degenerates to a register move (or nothing).
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            // Distances 32..63 only fit the "32" variants, which add 32 to
            // the encoded 5-bit amount.
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        // Register distance: the variable-shift forms take the distance from
        // rhs_reg (masked by the ISA to the type width).
        if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2132
// Location constraints for HAdd are shared with the other binary operations.
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2136
// Code emission for HAdd is shared with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2140
// Location constraints for HAnd are shared with the other binary operations.
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2144
// Code emission for HAnd is shared with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2148
// Chooses operand locations for an array element load. Object-array loads
// with read barriers may call a slow path and need extra constraints (output
// overlap and, without Baker thunks, a temp register).
void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
  DataType::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    // With Baker read-barrier thunks enabled, the temp is not needed for the
    // corresponding access shape (constant index behaves like a field load).
    bool temp_needed = instruction->GetIndex()->IsConstant()
        ? !kBakerReadBarrierThunksEnableForFields
        : !kBakerReadBarrierThunksEnableForArrays;
    if (temp_needed) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
2185
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002186static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS64* codegen) {
2187 auto null_checker = [codegen, instruction]() {
2188 codegen->MaybeRecordImplicitNullCheck(instruction);
2189 };
2190 return null_checker;
2191}
2192
// Emits an array element load. Addressing is either obj + constant offset or
// obj + index (scaled via Dlsa into TMP). Loads record an implicit null check
// via `null_checker`. String.charAt with string compression branches on the
// compression bit to load either a byte or a halfword; reference loads go
// through the read-barrier machinery when enabled.
void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  Location index = locations->InAt(1);
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  DataType::Type type = instruction->GetType();
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt8: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint16: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (maybe_compressed_char_at) {
        // TMP := compression bit of the string's count field
        // (0 = compressed, 1 = uncompressed); the load doubles as the
        // implicit null check for this instruction.
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
        __ Dext(TMP, TMP, 0, 1);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
      }
      if (index.IsConstant()) {
        int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
        if (maybe_compressed_char_at) {
          // Compressed: byte element; uncompressed: halfword element.
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedByte,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_1));
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2));
          __ Bind(&done);
        } else {
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2),
                            null_checker);
        }
      } else {
        GpuRegister index_reg = index.AsRegister<GpuRegister>();
        if (maybe_compressed_char_at) {
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ Daddu(TMP, obj, index_reg);
          __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
          __ Bind(&done);
        } else {
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
        }
      }
      break;
    }

    case DataType::Type::kInt16: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_2);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt32: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      LoadOperandType load_type =
          (type == DataType::Type::kReference) ? kLoadUnsignedWord : kLoadWord;
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(load_type, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFromOffset(load_type, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kReference: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // A temp was allocated only when the Baker thunk for the matching
        // access shape is disabled (see the locations builder).
        bool temp_needed = index.IsConstant()
            ? !kBakerReadBarrierThunksEnableForFields
            : !kBakerReadBarrierThunksEnableForArrays;
        Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier call.
        DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
        if (index.IsConstant()) {
          // Array load with a constant index can be treated as a field load.
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          offset,
                                                          temp,
                                                          /* needs_null_check */ false);
        } else {
          codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          data_offset,
                                                          index,
                                                          temp,
                                                          /* needs_null_check */ false);
        }
      } else {
        GpuRegister out = out_loc.AsRegister<GpuRegister>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ LoadFromOffset(kLoadUnsignedWord, out, obj, offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          __ LoadFromOffset(kLoadUnsignedWord, out, TMP, data_offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction,
                                                 out_loc,
                                                 out_loc,
                                                 obj_loc,
                                                 data_offset,
                                                 index);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFpuFromOffset(kLoadWord, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2420
2421void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01002422 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002423 locations->SetInAt(0, Location::RequiresRegister());
2424 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2425}
2426
// Emits an array (or string) length load. The word load doubles as the
// implicit null check, so MaybeRecordImplicitNullCheck must directly follow
// it; for String.length the compression flag in bit 0 is shifted out.
void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // Mask out compression flag from String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Srl(out, out, 1u);
  }
}
2439
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002440Location LocationsBuilderMIPS64::RegisterOrZeroConstant(HInstruction* instruction) {
2441 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2442 ? Location::ConstantLocation(instruction->AsConstant())
2443 : Location::RequiresRegister();
2444}
2445
2446Location LocationsBuilderMIPS64::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2447 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2448 // We can store a non-zero float or double constant without first loading it into the FPU,
2449 // but we should only prefer this if the constant has a single use.
2450 if (instruction->IsConstant() &&
2451 (instruction->AsConstant()->IsZeroBitPattern() ||
2452 instruction->GetUses().HasExactlyOneElement())) {
2453 return Location::ConstantLocation(instruction->AsConstant());
2454 // Otherwise fall through and require an FPU register for the constant.
2455 }
2456 return Location::RequiresFpuRegister();
2457}
2458
// Chooses operand locations for an array element store: array and index as
// usual, the value either in a register or as a foldable constant, plus a
// temp when a GC write barrier is needed. A store that may need a runtime
// type check gets a slow-path call summary.
void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
    // FP values: allow storing zero (from ZERO) or a single-use constant
    // without materializing it in an FPU register.
    locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
  } else {
    locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
  }
  if (needs_write_barrier) {
    // Temporary register for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
  }
}
2484
// Emits the store for an array element assignment (array[index] = value).
// The element address is formed as `base_reg + data_offset`: for a constant
// index the scaled index is folded into `data_offset` (base_reg == obj), for a
// register index the scaled index is added into TMP (base_reg == TMP).
// Reference stores may additionally need a runtime type check (slow path),
// reference poisoning, and a GC card mark.
void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  // Constant index: address the element off the array register directly.
  // Register index: TMP will hold obj plus the scaled index.
  GpuRegister base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        // Byte elements: no scaling needed, plain add.
        __ Daddu(base_reg, obj, index.AsRegister<GpuRegister>());
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        // Dlsa computes base_reg = (index << TIMES_2) + obj in one instruction.
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_2);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kReference: {
      // A constant reference value can only be null (DCHECKed below): store it
      // directly; no type check, write barrier, or poisoning is required.
      if (value_location.IsConstant()) {
        // Just setting null.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      GpuRegister value = value_location.AsRegister<GpuRegister>();
      GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
      GpuRegister temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      Mips64Label done;
      SlowPathCodeMIPS64* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        // The inline type check may fail; the slow path redoes the set.
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Null always passes the type check; store it and skip the check.
          Mips64Label non_zero;
          __ Bnezc(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ Bc(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers. This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // Also accept values whose class is a subclass of Object (i.e. whose
          // super class is Object); only otherwise go to the slow path.
          Mips64Label do_put;
          __ Beqc(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnezc(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bnec(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      GpuRegister source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      // No null_checker here: the implicit null check is recorded explicitly
      // below, but only when no slow path instructions were emitted above.
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      // GC write barrier: mark the card covering `obj`.
      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        // Constant FP bit patterns are stored from a core register.
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2725
2726void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002727 RegisterSet caller_saves = RegisterSet::Empty();
2728 InvokeRuntimeCallingConvention calling_convention;
2729 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2730 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2731 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicdbd43032017-11-15 16:31:56 +01002732
2733 HInstruction* index = instruction->InputAt(0);
2734 HInstruction* length = instruction->InputAt(1);
2735
2736 bool const_index = false;
2737 bool const_length = false;
2738
2739 if (index->IsConstant()) {
2740 if (length->IsConstant()) {
2741 const_index = true;
2742 const_length = true;
2743 } else {
2744 int32_t index_value = index->AsIntConstant()->GetValue();
2745 if (index_value < 0 || IsInt<16>(index_value + 1)) {
2746 const_index = true;
2747 }
2748 }
2749 } else if (length->IsConstant()) {
2750 int32_t length_value = length->AsIntConstant()->GetValue();
2751 if (IsUint<15>(length_value)) {
2752 const_length = true;
2753 }
2754 }
2755
2756 locations->SetInAt(0, const_index
2757 ? Location::ConstantLocation(index->AsConstant())
2758 : Location::RequiresRegister());
2759 locations->SetInAt(1, const_length
2760 ? Location::ConstantLocation(length->AsConstant())
2761 : Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002762}
2763
2764void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
2765 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicdbd43032017-11-15 16:31:56 +01002766 Location index_loc = locations->InAt(0);
2767 Location length_loc = locations->InAt(1);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002768
Goran Jakovljevicdbd43032017-11-15 16:31:56 +01002769 if (length_loc.IsConstant()) {
2770 int32_t length = length_loc.GetConstant()->AsIntConstant()->GetValue();
2771 if (index_loc.IsConstant()) {
2772 int32_t index = index_loc.GetConstant()->AsIntConstant()->GetValue();
2773 if (index < 0 || index >= length) {
2774 BoundsCheckSlowPathMIPS64* slow_path =
2775 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
2776 codegen_->AddSlowPath(slow_path);
2777 __ Bc(slow_path->GetEntryLabel());
2778 } else {
2779 // Nothing to be done.
2780 }
2781 return;
2782 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002783
Goran Jakovljevicdbd43032017-11-15 16:31:56 +01002784 BoundsCheckSlowPathMIPS64* slow_path =
2785 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
2786 codegen_->AddSlowPath(slow_path);
2787 GpuRegister index = index_loc.AsRegister<GpuRegister>();
2788 if (length == 0) {
2789 __ Bc(slow_path->GetEntryLabel());
2790 } else if (length == 1) {
2791 __ Bnezc(index, slow_path->GetEntryLabel());
2792 } else {
2793 DCHECK(IsUint<15>(length)) << length;
2794 __ Sltiu(TMP, index, length);
2795 __ Beqzc(TMP, slow_path->GetEntryLabel());
2796 }
2797 } else {
2798 GpuRegister length = length_loc.AsRegister<GpuRegister>();
2799 BoundsCheckSlowPathMIPS64* slow_path =
2800 new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
2801 codegen_->AddSlowPath(slow_path);
2802 if (index_loc.IsConstant()) {
2803 int32_t index = index_loc.GetConstant()->AsIntConstant()->GetValue();
2804 if (index < 0) {
2805 __ Bc(slow_path->GetEntryLabel());
2806 } else if (index == 0) {
2807 __ Blezc(length, slow_path->GetEntryLabel());
2808 } else {
2809 DCHECK(IsInt<16>(index + 1)) << index;
2810 __ Sltiu(TMP, length, index + 1);
2811 __ Bnezc(TMP, slow_path->GetEntryLabel());
2812 }
2813 } else {
2814 GpuRegister index = index_loc.AsRegister<GpuRegister>();
2815 __ Bgeuc(index, length, slow_path->GetEntryLabel());
2816 }
2817 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002818}
2819
Alexey Frunze15958152017-02-09 19:08:30 -08002820// Temp is used for read barrier.
2821static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
2822 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002823 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08002824 (kUseBakerReadBarrier ||
2825 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
2826 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
2827 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
2828 return 1;
2829 }
2830 return 0;
2831}
2832
2833// Extra temp is used for read barrier.
2834static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
2835 return 1 + NumberOfInstanceOfTemps(type_check_kind);
2836}
2837
Alexey Frunze4dda3372015-06-01 18:31:49 -07002838void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002839 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
2840 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
2841
2842 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
2843 switch (type_check_kind) {
2844 case TypeCheckKind::kExactCheck:
2845 case TypeCheckKind::kAbstractClassCheck:
2846 case TypeCheckKind::kClassHierarchyCheck:
2847 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08002848 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002849 ? LocationSummary::kCallOnSlowPath
2850 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
2851 break;
2852 case TypeCheckKind::kArrayCheck:
2853 case TypeCheckKind::kUnresolvedCheck:
2854 case TypeCheckKind::kInterfaceCheck:
2855 call_kind = LocationSummary::kCallOnSlowPath;
2856 break;
2857 }
2858
Vladimir Markoca6fff82017-10-03 14:49:14 +01002859 LocationSummary* locations =
2860 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002861 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffraybff7a522018-01-25 13:33:07 +00002862 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08002863 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002864}
2865
// Emits the inline portion of a CheckCast: depending on the check kind it
// compares the object's class (or a class reached through the super-class,
// component-type, or iftable chains) against the target class in `cls`, and
// branches to a TypeCheckSlowPathMIPS64 when the inline check cannot prove
// the cast valid. A null object always passes (when a null check is needed).
void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location temp_loc = locations->GetTemp(0);
  GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_LE(num_temps, 2u);
  // The optional second temp exists only for the read barrier configurations.
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
  Mips64Label done;

  // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
  // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
  // read barriers is done for performance and code size reasons.
  bool is_type_check_slow_path_fatal = false;
  if (!kEmitCompilerReadBarrier) {
    is_type_check_slow_path_fatal =
        (type_check_kind == TypeCheckKind::kExactCheck ||
         type_check_kind == TypeCheckKind::kAbstractClassCheck ||
         type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
         type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
        !instruction->CanThrowIntoCatchBlock();
  }
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(slow_path);

  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ Bnec(temp, cls, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, compare the classes.
      __ Bnec(temp, cls, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqc(temp, cls, &done);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception. Otherwise, jump to the beginning of the loop.
      __ Bnezc(temp, &loop);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Do an exact check.
      __ Beqc(temp, cls, &done);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the component type is null, jump to the slow path to throw the exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array, further check that this component
      // type is not a primitive type.
      __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Bnezc(temp, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check case.
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ Bc(slow_path->GetEntryLabel());
      break;

    case TypeCheckKind::kInterfaceCheck: {
      // Avoid read barriers to improve performance of the fast path. We can not get false
      // positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Lw(TMP, temp, array_length_offset);
      // Loop through the iftable and check if any class matches.
      // TMP counts down the remaining iftable slots; each entry spans two
      // references (hence the stride of 2 * kHeapReferenceSize below).
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqzc(TMP, slow_path->GetEntryLabel());
      __ Lwu(AT, temp, object_array_data_offset);
      __ MaybeUnpoisonHeapReference(AT);
      // Go to next interface.
      __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
      __ Addiu(TMP, TMP, -2);
      // Compare the classes and continue the loop if they do not match.
      __ Bnec(AT, cls, &loop);
      break;
    }
  }

  __ Bind(&done);
  __ Bind(slow_path->GetExitLabel());
}
3052
3053void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
3054 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003055 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003056 locations->SetInAt(0, Location::RequiresRegister());
3057 if (check->HasUses()) {
3058 locations->SetOut(Location::SameAsFirstInput());
3059 }
3060}
3061
3062void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
3063 // We assume the class is not null.
Vladimir Marko174b2e22017-10-12 13:34:49 +01003064 SlowPathCodeMIPS64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(
Alexey Frunze4dda3372015-06-01 18:31:49 -07003065 check->GetLoadClass(),
3066 check,
3067 check->GetDexPc(),
3068 true);
3069 codegen_->AddSlowPath(slow_path);
3070 GenerateClassInitializationCheck(slow_path,
3071 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
3072}
3073
3074void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003075 DataType::Type in_type = compare->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003076
Vladimir Markoca6fff82017-10-03 14:49:14 +01003077 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(compare);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003078
3079 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003080 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003081 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003082 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003083 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003084 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003085 case DataType::Type::kInt32:
3086 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003087 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07003088 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003089 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3090 break;
3091
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003092 case DataType::Type::kFloat32:
3093 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003094 locations->SetInAt(0, Location::RequiresFpuRegister());
3095 locations->SetInAt(1, Location::RequiresFpuRegister());
3096 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003097 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003098
3099 default:
3100 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3101 }
3102}
3103
// Materializes the three-way comparison result of HCompare into the output
// GPR: 0, 1 or -1 (see the table below). Integral compares use two slt's;
// FP compares use the R6 CMP.cond.fmt instructions with explicit NaN (gt/lt
// bias) handling.
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  DataType::Type in_type = instruction->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      // A zero constant rhs stays in the ZERO register; any other constant
      // is materialized into AT first.
      GpuRegister rhs = ZERO;
      if (use_imm) {
        if (in_type == DataType::Type::kInt64) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // res = (rhs < lhs) - (lhs < rhs), which yields -1/0/1.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case DataType::Type::kFloat32: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Equal (ordered) -> 0.
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // gt-bias: unordered (NaN) falls through to the result 1.
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // lt-bias: unordered (NaN) falls through to the result -1.
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case DataType::Type::kFloat64: {
      // Same scheme as kFloat32, with double-precision compares.
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3195
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003196void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003197 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze299a9392015-12-08 16:08:02 -08003198 switch (instruction->InputAt(0)->GetType()) {
3199 default:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003200 case DataType::Type::kInt64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003201 locations->SetInAt(0, Location::RequiresRegister());
3202 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3203 break;
3204
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003205 case DataType::Type::kFloat32:
3206 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003207 locations->SetInAt(0, Location::RequiresFpuRegister());
3208 locations->SetInAt(1, Location::RequiresFpuRegister());
3209 break;
3210 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003211 if (!instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003212 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3213 }
3214}
3215
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003216void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003217 if (instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003218 return;
3219 }
3220
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003221 DataType::Type type = instruction->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003222 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08003223 switch (type) {
3224 default:
3225 // Integer case.
3226 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
3227 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003228 case DataType::Type::kInt64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003229 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
3230 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003231 case DataType::Type::kFloat32:
3232 case DataType::Type::kFloat64:
Tijana Jakovljevic43758192016-12-30 09:23:01 +01003233 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3234 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003235 }
3236}
3237
Alexey Frunzec857c742015-09-23 15:12:39 -07003238void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3239 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003240 DataType::Type type = instruction->GetResultType();
Alexey Frunzec857c742015-09-23 15:12:39 -07003241
3242 LocationSummary* locations = instruction->GetLocations();
3243 Location second = locations->InAt(1);
3244 DCHECK(second.IsConstant());
3245
3246 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3247 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3248 int64_t imm = Int64FromConstant(second.GetConstant());
3249 DCHECK(imm == 1 || imm == -1);
3250
3251 if (instruction->IsRem()) {
3252 __ Move(out, ZERO);
3253 } else {
3254 if (imm == -1) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003255 if (type == DataType::Type::kInt32) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003256 __ Subu(out, ZERO, dividend);
3257 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003258 DCHECK_EQ(type, DataType::Type::kInt64);
Alexey Frunzec857c742015-09-23 15:12:39 -07003259 __ Dsubu(out, ZERO, dividend);
3260 }
3261 } else if (out != dividend) {
3262 __ Move(out, dividend);
3263 }
3264 }
3265}
3266
// Emits code for a div/rem whose constant divisor is +/-2^n, using shifts
// instead of a hardware divide. Signed division must round toward zero, so
// a bias of (2^n - 1) is added to negative dividends before shifting; the
// bias is built by shifting the replicated sign bit.
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  // AbsOrMin also handles imm == INT64_MIN, whose absolute value is itself
  // as an unsigned quantity.
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);  // n, where |imm| == 2^n.

  if (instruction->IsDiv()) {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);
      } else {
        // TMP = (dividend < 0) ? (2^n - 1) : 0.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      if (imm < 0) {
        // Negative divisor: negate the quotient.
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        __ Dsra32(TMP, dividend, 31);
        // The 64-bit shift amount must be split across Dsrl/Dsrl32
        // (each encodes only a 5-bit shift field).
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    // Remainder: bias, mask off the low n bits' complement, un-bias.
    // The result keeps the sign of the dividend.
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        // Clear bits [ctz_imm, 31], keeping only the low n bits.
        __ Ins(out, ZERO, ctz_imm, 32 - ctz_imm);
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        // Clear bits [ctz_imm, 63], keeping only the low n bits.
        __ DblIns(out, ZERO, ctz_imm, 64 - ctz_imm);
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
3355
// Emits code for a div/rem by an arbitrary non-trivial constant using the
// "magic number" technique: a high multiply by a precomputed reciprocal,
// a correction add/sub, an arithmetic shift, and a sign fix-up. The
// remainder is then recovered as dividend - quotient * imm.
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == DataType::Type::kInt64),
                                  &magic,
                                  &shift);

  if (type == DataType::Type::kInt32) {
    // TMP = high 32 bits of dividend * magic.
    __ LoadConst32(TMP, magic);
    __ MuhR6(TMP, dividend, TMP);

    // Correct for the sign mismatch between imm and magic.
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // quotient = TMP + (TMP < 0), i.e. TMP - sign(TMP).
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // remainder = dividend - quotient * imm.
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    // 64-bit variant of the sequence above.
    __ LoadConst64(TMP, magic);
    __ Dmuh(TMP, dividend, TMP);

    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    // 64-bit shifts > 31 need the *32 instruction forms.
    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
3429
// Dispatches integer div/rem code generation based on the divisor:
// constant divisors get strength-reduced sequences; register divisors use
// the hardware div/mod instructions.
void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  LocationSummary* locations = instruction->GetLocations();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int64_t imm = Int64FromConstant(second.GetConstant());
    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      // Divisors of the form +/-2^n (AbsOrMin also covers INT*_MIN).
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      // Generic constant divisor: magic-number multiplication.
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
    GpuRegister divisor = second.AsRegister<GpuRegister>();
    if (instruction->IsDiv()) {
      if (type == DataType::Type::kInt32)
        __ DivR6(out, dividend, divisor);
      else
        __ Ddiv(out, dividend, divisor);
    } else {
      if (type == DataType::Type::kInt32)
        __ ModR6(out, dividend, divisor);
      else
        __ Dmod(out, dividend, divisor);
    }
  }
}
3467
Alexey Frunze4dda3372015-06-01 18:31:49 -07003468void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
3469 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003470 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003471 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003472 case DataType::Type::kInt32:
3473 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003474 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07003475 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003476 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3477 break;
3478
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003479 case DataType::Type::kFloat32:
3480 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003481 locations->SetInAt(0, Location::RequiresFpuRegister());
3482 locations->SetInAt(1, Location::RequiresFpuRegister());
3483 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3484 break;
3485
3486 default:
3487 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3488 }
3489}
3490
// Emits code for HDiv: integral types delegate to the shared div/rem
// helper; FP types map directly onto div.s / div.d.
void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      GenerateDivRemIntegral(instruction);
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (type == DataType::Type::kFloat32)
        __ DivS(dst, lhs, rhs);
      else
        __ DivD(dst, lhs, rhs);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}
3515
void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  // Throwing slow-path locations: the check may branch to a runtime throw.
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}
3520
// Emits the divisor-is-zero check: branches to a throwing slow path when
// the divisor is (or is statically known to be) zero; emits nothing for a
// non-zero constant divisor.
void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathMIPS64(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);

  DataType::Type type = instruction->GetType();

  if (!DataType::IsIntegralType(type)) {
    LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
    return;
  }

  if (value.IsConstant()) {
    int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
    if (divisor == 0) {
      // Statically zero: unconditionally take the slow path.
      __ Bc(slow_path->GetEntryLabel());
    } else {
      // A division by a non-null constant is valid. We don't need to perform
      // any check, so simply fall through.
    }
  } else {
    // Runtime value: branch to the slow path if it is zero.
    __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
  }
}
3546
void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
  // Constants occupy no register; the value is materialized at each use site.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
3552
void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3556
void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  // The exit block needs no register locations.
  exit->SetLocations(nullptr);
}
3560
void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
  // The exit block emits no code.
}
3563
void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
  // Constants occupy no register; the value is materialized at each use site.
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
3569
void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3573
// Emits the control transfer for an unconditional jump (HGoto/HTryBoundary):
// inserts suspend checks on loop back edges and after the entry block, and
// emits a branch only when the successor is not the next block in layout
// order.
void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Loop back edge: the suspend check also performs the jump to the
    // successor.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    // Method entry: run the pending suspend check, then fall through.
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ Bc(codegen_->GetLabelOf(successor));
  }
}
3595
void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
  // An unconditional branch needs no register locations.
  got->SetLocations(nullptr);
}
3599
void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
  // Delegate to the shared goto handler.
  HandleGoto(got, got->GetSuccessor());
}
3603
void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  // Try boundaries need no register locations.
  try_boundary->SetLocations(nullptr);
}
3607
3608void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
3609 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3610 if (!successor->IsExitBlock()) {
3611 HandleGoto(try_boundary, successor);
3612 }
3613}
3614
// Materializes the boolean value of `cond` applied to the integral operands
// described by `locations` into the output GPR (0 or 1). MIPS64R6 only has
// slt/sltu-style compares, so the remaining conditions are synthesized by
// swapping operands, adjusting immediates, and/or inverting the result
// with Xori.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
                                                            bool is64bit,
                                                            LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  // Unsigned addend so that rhs_imm == INT64_MAX wraps instead of causing
  // signed-overflow UB.
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // Reduce lhs ?= rhs to a compare of (lhs - rhs) or (lhs ^ rhs)
      // against zero, then map zero/non-zero to 1/0 (EQ) or 0/1 (NE).
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (rhs_imm == 0) {
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          if (is64bit) {
            __ Daddiu(dst, lhs, -rhs_imm);
          } else {
            __ Addiu(dst, lhs, -rhs_imm);
          }
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        if (use_imm && IsUint<16>(rhs_imm)) {
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst64(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
3770
// Materializes `cond` (or its negation) for the integral operands described
// by `input_locations` into `dst`. Depending on the condition, `dst` holds
// either 0/1 (slt-based cases) or a zero/non-zero value (EQ/NE cases, where
// dst is the difference/xor of the operands). Returns true when `dst` holds
// the *negation* of the requested condition, in which case the caller must
// invert the test.
bool InstructionCodeGeneratorMIPS64::MaterializeIntLongCompare(IfCondition cond,
                                                               bool is64bit,
                                                               LocationSummary* input_locations,
                                                               GpuRegister dst) {
  GpuRegister lhs = input_locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = input_locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  // Unsigned addend so that rhs_imm == INT64_MAX wraps instead of causing
  // signed-overflow UB.
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // dst = lhs - rhs or lhs ^ rhs: zero iff equal. The zero/non-zero
      // value directly encodes NE, so EQ reports "inverted".
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (is64bit) {
          __ Daddiu(dst, lhs, -rhs_imm);
        } else {
          __ Addiu(dst, lhs, -rhs_imm);
        }
      } else if (use_imm && IsUint<16>(rhs_imm)) {
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      return (cond == kCondEQ);

    case kCondLT:
    case kCondGE:
      // dst = (lhs < rhs); GE is the inversion.
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      return (cond == kCondGE);

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        return (cond == kCondGT);
      } else {
        // dst = (rhs < lhs), i.e. GT; LE is the inversion.
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        return (cond == kCondLE);
      }

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      return (cond == kCondAE);

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        return (cond == kCondA);
      } else {
        // dst = (rhs < lhs), i.e. A; BE is the inversion.
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        return (cond == kCondBE);
      }
  }
}
3878
// Emits a compare-and-branch to `label` for a 32-bit (is64bit == false) or
// 64-bit (is64bit == true) integer comparison, using the MIPS R6 compact
// branches. A constant right-hand side of zero is special-cased to the
// compare-with-zero branch forms; any other constant is first materialized
// into TMP. Note that against zero an unsigned "below" can never hold and
// an unsigned "above or equal" always holds.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
                                                                     bool is64bit,
                                                                     LocationSummary* locations,
                                                                     Mips64Label* label) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }

  if (use_imm && rhs_imm == 0) {
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero (unsigned lhs <= 0 iff lhs == 0)
        __ Beqzc(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero (unsigned lhs > 0 iff lhs != 0)
        __ Bnezc(lhs, label);
        break;
      case kCondLT:
        __ Bltzc(lhs, label);
        break;
      case kCondGE:
        __ Bgezc(lhs, label);
        break;
      case kCondLE:
        __ Blezc(lhs, label);
        break;
      case kCondGT:
        __ Bgtzc(lhs, label);
        break;
      case kCondB:  // always false (nothing is unsigned-below zero)
        break;
      case kCondAE:  // always true (everything is unsigned >= zero)
        __ Bc(label);
        break;
    }
  } else {
    if (use_imm) {
      // Materialize the non-zero constant; the compact branches below are
      // register-register only.
      rhs_reg = TMP;
      __ LoadConst64(rhs_reg, rhs_imm);
    }
    switch (cond) {
      case kCondEQ:
        __ Beqc(lhs, rhs_reg, label);
        break;
      case kCondNE:
        __ Bnec(lhs, rhs_reg, label);
        break;
      case kCondLT:
        __ Bltc(lhs, rhs_reg, label);
        break;
      case kCondGE:
        __ Bgec(lhs, rhs_reg, label);
        break;
      case kCondLE:
        // lhs <= rhs is emitted as rhs >= lhs (operands swapped).
        __ Bgec(rhs_reg, lhs, label);
        break;
      case kCondGT:
        // lhs > rhs is emitted as rhs < lhs (operands swapped).
        __ Bltc(rhs_reg, lhs, label);
        break;
      case kCondB:
        __ Bltuc(lhs, rhs_reg, label);
        break;
      case kCondAE:
        __ Bgeuc(lhs, rhs_reg, label);
        break;
      case kCondBE:
        __ Bgeuc(rhs_reg, lhs, label);
        break;
      case kCondA:
        __ Bltuc(rhs_reg, lhs, label);
        break;
    }
  }
}
3965
// Materializes the result of a float/double comparison into the integer
// output register as 0 or 1.
//
// The R6 CMP.cond.fmt instructions write an all-ones mask to FTMP when the
// condition holds and all zeros otherwise. Mfc1 copies (and sign-extends)
// that mask into `dst`, so dst becomes -1 or 0:
//  - `Andi dst, dst, 1` keeps bit 0, yielding 1/0;
//  - for kCondNE an *equality* compare is emitted and `Addiu dst, dst, 1`
//    maps -1 -> 0 and 0 -> 1, i.e. it logically negates the result.
//
// `gt_bias` selects the NaN behavior: with gt_bias an unordered (NaN)
// operand makes the comparison behave as if lhs > rhs (ordered lt/le
// compares are used), otherwise as if lhs < rhs (unordered ult/ule
// variants are used). NaN always compares not-equal.
void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
                                                       bool gt_bias,
                                                       DataType::Type type,
                                                       LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Addiu(dst, dst, 1);  // Negate the equality mask: -1 -> 0, 0 -> 1.
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        // lhs > rhs is emitted as rhs < lhs (operands swapped).
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        // lhs >= rhs is emitted as rhs <= lhs (operands swapped).
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Addiu(dst, dst, 1);  // Negate the equality mask: -1 -> 0, 0 -> 1.
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        // lhs > rhs is emitted as rhs < lhs (operands swapped).
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        // lhs >= rhs is emitted as rhs <= lhs (operands swapped).
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
4080
// Emits a float/double comparison whose R6 all-ones/all-zeros condition
// mask is left in the FPU register `dst`, for later consumption by
// sel.fmt/seleqz.fmt/selnez.fmt or by Mfc1 + integer selects.
//
// Returns true when the emitted compare computes the *opposite* of `cond`
// — only kCondNE, which is emitted as an equality compare — so the caller
// must invert its use of the mask; returns false otherwise.
//
// NaN handling mirrors GenerateFpCompare: with `gt_bias` an unordered
// operand behaves as if lhs > rhs (ordered lt/le used), otherwise as if
// lhs < rhs (unordered ult/ule used).
bool InstructionCodeGeneratorMIPS64::MaterializeFpCompare(IfCondition cond,
                                                          bool gt_bias,
                                                          DataType::Type type,
                                                          LocationSummary* input_locations,
                                                          FpuRegister dst) {
  FpuRegister lhs = input_locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = input_locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(dst, lhs, rhs);
        return false;
      case kCondNE:
        // Emitted as equality; signal the caller to invert (return true).
        __ CmpEqS(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(dst, lhs, rhs);
        } else {
          __ CmpUltS(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(dst, lhs, rhs);
        } else {
          __ CmpUleS(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        // lhs > rhs is emitted as rhs < lhs (operands swapped).
        if (gt_bias) {
          __ CmpUltS(dst, rhs, lhs);
        } else {
          __ CmpLtS(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        // lhs >= rhs is emitted as rhs <= lhs (operands swapped).
        if (gt_bias) {
          __ CmpUleS(dst, rhs, lhs);
        } else {
          __ CmpLeS(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(dst, lhs, rhs);
        return false;
      case kCondNE:
        // Emitted as equality; signal the caller to invert (return true).
        __ CmpEqD(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(dst, lhs, rhs);
        } else {
          __ CmpUltD(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(dst, lhs, rhs);
        } else {
          __ CmpUleD(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        // lhs > rhs is emitted as rhs < lhs (operands swapped).
        if (gt_bias) {
          __ CmpUltD(dst, rhs, lhs);
        } else {
          __ CmpLtD(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        // lhs >= rhs is emitted as rhs <= lhs (operands swapped).
        if (gt_bias) {
          __ CmpUleD(dst, rhs, lhs);
        } else {
          __ CmpLeD(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
4171
// Emits a float/double compare followed by a branch to `label` when `cond`
// holds. CMP.cond.fmt leaves an all-ones/all-zeros mask in FTMP; Bc1nez
// branches when the mask is non-zero (condition true). kCondNE instead
// emits an equality compare and branches with Bc1eqz (zero mask means the
// operands are not equal).
//
// NaN handling mirrors GenerateFpCompare: with `gt_bias` an unordered
// operand behaves as if lhs > rhs, otherwise as if lhs < rhs; NaN always
// compares not-equal.
void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
                                                                bool gt_bias,
                                                                DataType::Type type,
                                                                LocationSummary* locations,
                                                                Mips64Label* label) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        // Inverted: branch when the equality mask is zero.
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        // lhs > rhs is emitted as rhs < lhs (operands swapped).
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        // lhs >= rhs is emitted as rhs <= lhs (operands swapped).
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        // Inverted: branch when the equality mask is zero.
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        // lhs > rhs is emitted as rhs < lhs (operands swapped).
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        // lhs >= rhs is emitted as rhs <= lhs (operands swapped).
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
4274
// Emits the control flow for `instruction` (HIf, HDeoptimize, or the
// branch-based HSelect fallback): evaluates the condition found at input
// `condition_input_index` and branches to `true_target` / `false_target`.
// A null target means that outcome falls through to the next block; a
// constant condition is resolved statically to at most one unconditional
// branch.
void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           Mips64Label* true_target,
                                                           Mips64Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ Bc(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ Bc(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
    } else {
      __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    DataType::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    Mips64Label* branch_target = true_target;

    // With no true target, branch on the opposite condition to the false
    // target instead (pattern (1) above).
    if (true_target == nullptr) {
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        // All types other than the cases below (narrow integrals etc.) are
        // compared as 32-bit values.
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
        break;
      case DataType::Type::kInt64:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ Bc(false_target);
  }
}
4350
4351void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004352 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00004353 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004354 locations->SetInAt(0, Location::RequiresRegister());
4355 }
4356}
4357
4358void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00004359 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
4360 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004361 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004362 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004363 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004364 nullptr : codegen_->GetLabelOf(false_successor);
4365 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004366}
4367
4368void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004369 LocationSummary* locations = new (GetGraph()->GetAllocator())
Alexey Frunze4dda3372015-06-01 18:31:49 -07004370 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01004371 InvokeRuntimeCallingConvention calling_convention;
4372 RegisterSet caller_saves = RegisterSet::Empty();
4373 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4374 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00004375 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004376 locations->SetInAt(0, Location::RequiresRegister());
4377 }
4378}
4379
4380void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08004381 SlowPathCodeMIPS64* slow_path =
4382 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00004383 GenerateTestAndBranch(deoptimize,
4384 /* condition_input_index */ 0,
4385 slow_path->GetEntryLabel(),
4386 /* false_target */ nullptr);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004387}
4388
// This function returns true if a conditional move can be generated for HSelect.
// Otherwise it returns false and HSelect must be implemented in terms of conditional
// branches and regular moves.
//
// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
//
// While determining feasibility of a conditional move and setting inputs/outputs
// are two distinct tasks, this function does both because they share quite a bit
// of common logic.
static bool CanMoveConditionally(HSelect* select, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  HCondition* condition = cond->AsCondition();

  // A materialized condition is consumed as a 32-bit integer value.
  DataType::Type cond_type =
      materialized ? DataType::Type::kInt32 : condition->InputAt(0)->GetType();
  DataType::Type dst_type = select->GetType();

  // A zero-bit-pattern constant input can be synthesized by seleqz/selnez
  // instead of occupying a register; `use_const_for_{true,false}_in` below
  // record when that applies.
  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  // A constant condition gets no conditional move; the branch-based fallback
  // in GenerateTestAndBranch resolves it statically.
  if (!cond->IsConstant()) {
    if (!DataType::IsFloatingPointType(cond_type)) {
      if (!DataType::IsFloatingPointType(dst_type)) {
        // Moving int/long on int/long condition.
        if (is_true_value_zero_constant) {
          // seleqz out_reg, false_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez out_reg, true_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_false_in = true;
        } else if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          // selnez AT, true_reg, cond_reg
          // seleqz TMP, false_reg, cond_reg
          // or out_reg, AT, TMP
          can_move_conditionally = true;
        }
      } else {
        // Moving float/double on int/long condition.
        if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          can_move_conditionally = true;
          if (is_true_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // seleqz.fmt out_reg, false_reg, temp_cond_reg
            use_const_for_true_in = true;
          } else if (is_false_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // selnez.fmt out_reg, true_reg, temp_cond_reg
            use_const_for_false_in = true;
          } else {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // sel.fmt temp_cond_reg, false_reg, true_reg
            // mov.fmt out_reg, temp_cond_reg
          }
        }
      }
    } else {
      if (!DataType::IsFloatingPointType(dst_type)) {
        // Moving int/long on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // seleqz out_reg, false_reg, TMP
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // selnez out_reg, true_reg, TMP
          use_const_for_false_in = true;
        } else {
          // mfc1 TMP, temp_cond_reg
          // selnez AT, true_reg, TMP
          // seleqz TMP, false_reg, TMP
          // or out_reg, AT, TMP
        }
      } else {
        // Moving float/double on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // seleqz.fmt out_reg, false_reg, temp_cond_reg
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez.fmt out_reg, true_reg, temp_cond_reg
          use_const_for_false_in = true;
        } else {
          // sel.fmt temp_cond_reg, false_reg, true_reg
          // mov.fmt out_reg, temp_cond_reg
        }
      }
    }
  }

  // The const-input shortcuts are only meaningful for a conditional move;
  // at most one of them may be set.
  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    // Input 0 is the false value, input 1 the true value, input 2 the
    // condition (register only when materialized). Zero constants that the
    // selects can synthesize stay constants.
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                DataType::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                DataType::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }

    if (can_move_conditionally) {
      locations_to_set->SetOut(DataType::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    } else {
      // Branch-based fallback: the output aliases the false value (input 0)
      // so only the true value ever needs to be moved in.
      locations_to_set->SetOut(Location::SameAsFirstInput());
    }
  }

  return can_move_conditionally;
}
4536
4537
// Emits the branchless select sequence for an HSelect that
// CanMoveConditionally() approved. The condition value ends up either in
// `cond_reg` (the materialized input register, or TMP when compared here)
// or in `fcond_reg` (FTMP) as an R6 all-ones/all-zeros FPU mask.
// `cond_inverted` records when the materialized compare encodes the
// opposite of the select condition; in that case the roles of the
// true/false inputs are swapped below. Constant inputs are guaranteed
// (and DCHECKed) to be zero bit patterns and are synthesized by the
// seleqz/selnez family rather than loaded into registers.
void InstructionCodeGeneratorMIPS64::GenConditionalMove(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  GpuRegister cond_reg = TMP;
  FpuRegister fcond_reg = FTMP;
  DataType::Type cond_type = DataType::Type::kInt32;
  bool cond_inverted = false;
  DataType::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // Condition already lives in a register (input 2).
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<GpuRegister>();
  } else {
    // Materialize the unmaterialized condition into TMP/FTMP now.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ false,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kInt64:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ true,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        cond_inverted = MaterializeFpCompare(if_cond,
                                             condition->IsGtBias(),
                                             cond_type,
                                             cond_locations,
                                             fcond_reg);
        break;
    }
  }

  // CanMoveConditionally() only allows zero constants as select inputs.
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      // Integer destination.
      if (DataType::IsFloatingPointType(cond_type)) {
        // Move the FPU condition mask into an integer register first.
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        // True value is zero: select the false value or zero.
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        // False value is zero: select the true value or zero.
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else {
        // General case: combine the two one-sided selects with OR. TMP is
        // clobbered, so the condition must not live in AT.
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        }
        __ Or(dst.AsRegister<GpuRegister>(), AT, TMP);
      }
      break;
    case DataType::Type::kFloat32: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        // True value is zero: select the false value or zero.
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        // False value is zero: select the true value or zero.
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // sel.fmt picks between its two sources into fcond_reg, which is
        // then copied to the destination.
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        // True value is zero: select the false value or zero.
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        // False value is zero: select the true value or zero.
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // sel.fmt picks between its two sources into fcond_reg, which is
        // then copied to the destination.
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
4689
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004690void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004691 LocationSummary* locations = new (GetGraph()->GetAllocator())
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004692 LocationSummary(flag, LocationSummary::kNoCall);
4693 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07004694}
4695
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004696void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
4697 __ LoadFromOffset(kLoadWord,
4698 flag->GetLocations()->Out().AsRegister<GpuRegister>(),
4699 SP,
4700 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07004701}
4702
David Brazdil74eb1b22015-12-14 11:44:01 +00004703void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004704 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004705 CanMoveConditionally(select, locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00004706}
4707
4708void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004709 if (CanMoveConditionally(select, /* locations_to_set */ nullptr)) {
4710 GenConditionalMove(select);
4711 } else {
4712 LocationSummary* locations = select->GetLocations();
4713 Mips64Label false_target;
4714 GenerateTestAndBranch(select,
4715 /* condition_input_index */ 2,
4716 /* true_target */ nullptr,
4717 &false_target);
4718 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
4719 __ Bind(&false_target);
4720 }
David Brazdil74eb1b22015-12-14 11:44:01 +00004721}
4722
// HNativeDebugInfo consumes no inputs and produces no output, so an empty
// LocationSummary suffices.
void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
4726
// Intentionally emits no code.
void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
4730
// Emits a single MIPS nop instruction.
void CodeGeneratorMIPS64::GenerateNop() {
  __ Nop();
}
4734
Alexey Frunze4dda3372015-06-01 18:31:49 -07004735void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08004736 const FieldInfo& field_info) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004737 DataType::Type field_type = field_info.GetFieldType();
Alexey Frunze15958152017-02-09 19:08:30 -08004738 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004739 kEmitCompilerReadBarrier && (field_type == DataType::Type::kReference);
Vladimir Markoca6fff82017-10-03 14:49:14 +01004740 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Alexey Frunze15958152017-02-09 19:08:30 -08004741 instruction,
4742 object_field_get_with_read_barrier
4743 ? LocationSummary::kCallOnSlowPath
4744 : LocationSummary::kNoCall);
Alexey Frunzec61c0762017-04-10 13:54:23 -07004745 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4746 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
4747 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004748 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004749 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004750 locations->SetOut(Location::RequiresFpuRegister());
4751 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08004752 // The output overlaps in the case of an object field get with
4753 // read barriers enabled: we do not want the move to overwrite the
4754 // object's location, as we need it to emit the read barrier.
4755 locations->SetOut(Location::RequiresRegister(),
4756 object_field_get_with_read_barrier
4757 ? Location::kOutputOverlap
4758 : Location::kNoOutputOverlap);
4759 }
4760 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4761 // We need a temporary register for the read barrier marking slow
4762 // path in CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07004763 if (!kBakerReadBarrierThunksEnableForFields) {
4764 locations->AddTemp(Location::RequiresRegister());
4765 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004766 }
4767}
4768
// Emits the code shared by instance/static field reads: picks the load
// width/signedness from the field type, performs the (possibly read-barriered)
// load, and inserts acquire barriers for volatile fields.
void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  // Folds the null check into the load when the offset permits it.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      load_type = kLoadUnsignedByte;
      break;
    case DataType::Type::kInt8:
      load_type = kLoadSignedByte;
      break;
    case DataType::Type::kUint16:
      load_type = kLoadUnsignedHalfword;
      break;
    case DataType::Type::kInt16:
      load_type = kLoadSignedHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
      load_type = kLoadWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      load_type = kLoadDoubleword;
      break;
    case DataType::Type::kReference:
      // References are 32-bit compressed heap pointers, zero-extended on load.
      load_type = kLoadUnsignedWord;
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!DataType::IsFloatingPointType(type)) {
    DCHECK(dst_loc.IsRegister());
    GpuRegister dst = dst_loc.AsRegister<GpuRegister>();
    if (type == DataType::Type::kReference) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // Thunk-based Baker barriers need no temp; the slow-path variant does.
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadUnsignedWord, dst, obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else {
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    }
  } else {
    DCHECK(dst_loc.IsFpuRegister());
    FpuRegister dst = dst_loc.AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, offset, null_checker);
  }

  // Memory barriers, in the case of references, are handled in the
  // previous switch statement.
  if (is_volatile && (type != DataType::Type::kReference)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
4857
4858void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
4859 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
4860 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004861 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004862 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004863 if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004864 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004865 } else {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004866 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004867 }
4868}
4869
// Emits the code shared by instance/static field writes: picks the store
// width from the field type, emits release/any barriers for volatile fields,
// poisons stored references when heap poisoning is on, and marks the GC card
// for reference stores.
void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DataType::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  // Folds the null check into the store when the offset permits it.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      store_type = kStoreByte;
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      store_type = kStoreHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
    case DataType::Type::kReference:
      store_type = kStoreWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      store_type = kStoreDoubleword;
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  // Release ordering for volatile stores: barrier before the store.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (value_location.IsConstant()) {
    int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
    __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
  } else {
    if (!DataType::IsFloatingPointType(type)) {
      DCHECK(value_location.IsRegister());
      GpuRegister src = value_location.AsRegister<GpuRegister>();
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, DataType::Type::kReference);
        // Poison into TMP so `src` keeps the unpoisoned reference for the
        // card mark below.
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      DCHECK(value_location.IsFpuRegister());
      FpuRegister src = value_location.AsFpuRegister<FpuRegister>();
      __ StoreFpuToOffset(store_type, src, obj, offset, null_checker);
    }
  }

  if (needs_write_barrier) {
    DCHECK(value_location.IsRegister());
    GpuRegister src = value_location.AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  // Volatile stores also order against any subsequent access.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4947
// Instance field read: locations come from the shared field-get handler.
void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4951
// Instance field read: code generation delegates to the shared handler.
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4955
// Instance field write: locations come from the shared field-set handler.
void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4959
4960void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01004961 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexey Frunze4dda3372015-06-01 18:31:49 -07004962}
4963
// Loads a heap reference located at `*(out + offset)` into `out` itself
// (source and destination share one register), applying the requested read
// barrier strategy. `maybe_temp` is only required for the non-thunk variants.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    }
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Move(maybe_temp.AsRegister<GpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
5002
// Loads a heap reference located at `*(obj + offset)` into `out` (distinct
// source and destination registers), applying the requested read barrier
// strategy. `maybe_temp` is only required for the slow-path Baker variant.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  GpuRegister obj_reg = obj.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
5039
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005040static inline int GetBakerMarkThunkNumber(GpuRegister reg) {
5041 static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 20, "Expecting equal");
5042 if (reg >= V0 && reg <= T2) { // 13 consequtive regs.
5043 return reg - V0;
5044 } else if (reg >= S2 && reg <= S7) { // 6 consequtive regs.
5045 return 13 + (reg - S2);
5046 } else if (reg == S8) { // One more.
5047 return 19;
5048 }
5049 LOG(FATAL) << "Unexpected register " << reg;
5050 UNREACHABLE();
5051}
5052
5053static inline int GetBakerMarkFieldArrayThunkDisplacement(GpuRegister reg, bool short_offset) {
5054 int num = GetBakerMarkThunkNumber(reg) +
5055 (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
5056 return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
5057}
5058
5059static inline int GetBakerMarkGcRootThunkDisplacement(GpuRegister reg) {
5060 return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
5061 BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
5062}
5063
// Loads a GC root at `*(obj + offset)` into `root`, applying the requested
// read barrier strategy (thunk-based Baker, slow-path Baker, non-Baker slow
// path, or none). `label_low` — when non-null — is bound exactly at the load
// instruction; the 0x5678 offset is a placeholder patched later (hence the
// DCHECK). NOTE(review): branch/delay-slot scheduling below is exact; do not
// reorder instructions.
void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             GpuRegister obj,
                                                             uint32_t offset,
                                                             ReadBarrierOption read_barrier_option,
                                                             Mips64Label* label_low) {
  if (label_low != nullptr) {
    DCHECK_EQ(offset, 0x5678u);
  }
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        // temp = Thread::Current()->pReadBarrierMarkReg00
        //     // AKA &art_quick_read_barrier_mark_introspection.
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // if (temp != nullptr) {
        //    temp = &gc_root_thunk<root_reg>
        //    root = temp(root)
        // }

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lwu.
        bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
        GpuRegister base = short_offset ? obj : TMP;
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
        if (!short_offset) {
          DCHECK(!label_low);
          __ Daui(base, obj, offset_high);
        }
        Mips64Label skip_call;
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        if (label_low != nullptr) {
          DCHECK(short_offset);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, base, offset_low);  // Single instruction
                                                                           // in delay slot.
        __ Jialc(T9, thunk_disp);
        __ Bind(&skip_call);
      } else {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark entry point corresponding
        // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
        // is false, and vice versa.
        //
        //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        //     temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
        //     if (temp != null) {
        //       root = temp(root)
        //     }

        if (label_low != nullptr) {
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
        static_assert(
            sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
            "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
            "have different sizes.");
        static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                      "art::mirror::CompressedReference<mirror::Object> and int32_t "
                      "have different sizes.");

        // Slow path marking the GC root `root`.
        Location temp = Location::RegisterLocation(T9);
        SlowPathCodeMIPS64* slow_path =
            new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(
                instruction,
                root,
                /*entrypoint*/ temp);
        codegen_->AddSlowPath(slow_path);

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(root.reg() - 1);
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, temp.AsRegister<GpuRegister>(), TR, entry_point_offset);
        __ Bnezc(temp.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
        __ Bind(slow_path->GetExitLabel());
      }
    } else {
      if (label_low != nullptr) {
        __ Bind(label_low);
      }
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Daddiu64(root_reg, obj, static_cast<int32_t>(offset));
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    if (label_low != nullptr) {
      __ Bind(label_low);
    }
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
5188
// Loads a heap reference field at `*(obj + offset)` into `ref` with a Baker
// read barrier. With thunks enabled, emits the compact branch/thunk-call
// sequence directly and returns; otherwise falls through to the generic
// reference-load helper. NOTE(review): the branch/delay-slot scheduling in
// the thunk path is exact; do not reorder instructions.
void CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t offset,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (kBakerReadBarrierThunksEnableForFields) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    // temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    // if (temp != nullptr) {
    //    temp = &field_array_thunk<holder_reg>
    //    temp()
    // }
    // not_gray_return_address:
    //   // If the offset is too large to fit into the lw instruction, we
    //   // use an adjusted base register (TMP) here. This register
    //   // receives bits 16 ... 31 of the offset before the thunk invocation
    //   // and the thunk benefits from it.
    //   HeapReference<mirror::Object> reference = *(obj+offset);  // Original reference load.
    // gray_return_address:

    DCHECK(temp.IsInvalid());
    bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // There may have or may have not been a null check if the field offset is smaller than
    // the page size.
    // There must've been a null check in case it's actually a load from an array.
    // We will, however, perform an explicit null check in the thunk as it's easier to
    // do it than not.
    if (instruction->IsArrayGet()) {
      DCHECK(!needs_null_check);
    }
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    Mips64Label skip_call;
    if (short_offset) {
      // Compact branch (no delay slot); the nop fills the forbidden slot.
      __ Beqzc(T9, &skip_call, /* is_bare */ true);
      __ Nop();  // In forbidden slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);  // Single instruction.
    } else {
      int16_t offset_low = Low16Bits(offset);
      int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign extension in lwu.
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Daui(TMP, obj, offset_high);  // In delay slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset_low);  // Single instruction.
    }
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}
5278
// Loads a heap reference array element at `*(obj + data_offset + index * 4)`
// into `ref` with a Baker read barrier. With thunks enabled, pre-computes the
// element address in TMP and emits the branch/thunk-call sequence directly;
// otherwise falls through to the generic reference-load helper.
// NOTE(review): the branch/delay-slot scheduling is exact; do not reorder.
void CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  ScaleFactor scale_factor = TIMES_4;

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    // temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    // if (temp != nullptr) {
    //    temp = &field_array_thunk<holder_reg>
    //    temp()
    // }
    // not_gray_return_address:
    //   // The element address is pre-calculated in the TMP register before the
    //   // thunk invocation and the thunk benefits from it.
    //   HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    // gray_return_address:

    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    Mips64Label skip_call;
    __ Beqz(T9, &skip_call, /* is_bare */ true);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    GpuRegister index_reg = index.AsRegister<GpuRegister>();
    __ Dlsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
    __ Jialc(T9, thunk_disp);
    __ Bind(&skip_call);
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
5354
// Fast-path (Baker) read barrier for a reference load from `obj` at
// `offset` (plus `index` << `scale_factor` when `index` is valid). The
// object's lock word is loaded *before* the reference itself, separated by
// a Sync(0) barrier; if the lock word's read barrier state bit marks the
// object gray, a marking slow path is entered after the load.
void CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    GpuRegister obj,
                                                                    uint32_t offset,
                                                                    Location index,
                                                                    ScaleFactor scale_factor,
                                                                    Location temp,
                                                                    bool needs_null_check,
                                                                    bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
  GpuRegister temp_reg = temp.AsRegister<GpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    // The monitor load above doubles as the implicit null check.
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, computed_offset);
    } else {
      GpuRegister index_reg = index.AsRegister<GpuRegister>();
      if (scale_factor == TIMES_1) {
        // DLSA cannot encode a shift amount of 0, so use a plain add for TIMES_1.
        __ Daddu(TMP, index_reg, obj);
      } else {
        __ Dlsa(TMP, index_reg, obj, scale_factor);
      }
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS64* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register. Thus `offset` and `scale_factor`
    // above are expected to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetScopedAllocator())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(instruction,
                                                    ref,
                                                    obj,
                                                    /* field_offset */ index,
                                                    temp_reg);
  } else {
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltzc(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5460
5461void CodeGeneratorMIPS64::GenerateReadBarrierSlow(HInstruction* instruction,
5462 Location out,
5463 Location ref,
5464 Location obj,
5465 uint32_t offset,
5466 Location index) {
5467 DCHECK(kEmitCompilerReadBarrier);
5468
5469 // Insert a slow path based read barrier *after* the reference load.
5470 //
5471 // If heap poisoning is enabled, the unpoisoning of the loaded
5472 // reference will be carried out by the runtime within the slow
5473 // path.
5474 //
5475 // Note that `ref` currently does not get unpoisoned (when heap
5476 // poisoning is enabled), which is alright as the `ref` argument is
5477 // not used by the artReadBarrierSlow entry point.
5478 //
5479 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01005480 SlowPathCodeMIPS64* slow_path = new (GetScopedAllocator())
Alexey Frunze15958152017-02-09 19:08:30 -08005481 ReadBarrierForHeapReferenceSlowPathMIPS64(instruction, out, ref, obj, offset, index);
5482 AddSlowPath(slow_path);
5483
5484 __ Bc(slow_path->GetEntryLabel());
5485 __ Bind(slow_path->GetExitLabel());
5486}
5487
5488void CodeGeneratorMIPS64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5489 Location out,
5490 Location ref,
5491 Location obj,
5492 uint32_t offset,
5493 Location index) {
5494 if (kEmitCompilerReadBarrier) {
5495 // Baker's read barriers shall be handled by the fast path
5496 // (CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier).
5497 DCHECK(!kUseBakerReadBarrier);
5498 // If heap poisoning is enabled, unpoisoning will be taken care of
5499 // by the runtime within the slow path.
5500 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
5501 } else if (kPoisonHeapReferences) {
5502 __ UnpoisonHeapReference(out.AsRegister<GpuRegister>());
5503 }
5504}
5505
5506void CodeGeneratorMIPS64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5507 Location out,
5508 Location root) {
5509 DCHECK(kEmitCompilerReadBarrier);
5510
5511 // Insert a slow path based read barrier *after* the GC root load.
5512 //
5513 // Note that GC roots are not affected by heap poisoning, so we do
5514 // not need to do anything special for this here.
5515 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01005516 new (GetScopedAllocator()) ReadBarrierForRootSlowPathMIPS64(instruction, out, root);
Alexey Frunze15958152017-02-09 19:08:30 -08005517 AddSlowPath(slow_path);
5518
5519 __ Bc(slow_path->GetEntryLabel());
5520 __ Bind(slow_path->GetExitLabel());
5521}
5522
Alexey Frunze4dda3372015-06-01 18:31:49 -07005523void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005524 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5525 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07005526 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005527 switch (type_check_kind) {
5528 case TypeCheckKind::kExactCheck:
5529 case TypeCheckKind::kAbstractClassCheck:
5530 case TypeCheckKind::kClassHierarchyCheck:
5531 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08005532 call_kind =
5533 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Alexey Frunzec61c0762017-04-10 13:54:23 -07005534 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005535 break;
5536 case TypeCheckKind::kArrayCheck:
5537 case TypeCheckKind::kUnresolvedCheck:
5538 case TypeCheckKind::kInterfaceCheck:
5539 call_kind = LocationSummary::kCallOnSlowPath;
5540 break;
5541 }
5542
Vladimir Markoca6fff82017-10-03 14:49:14 +01005543 LocationSummary* locations =
5544 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07005545 if (baker_read_barrier_slow_path) {
5546 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
5547 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005548 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffraybff7a522018-01-25 13:33:07 +00005549 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005550 // The output does overlap inputs.
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01005551 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07005552 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08005553 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005554}
5555
// Generates code for HInstanceOf. Computes into `out` the boolean result:
// 1 if `obj` (input 0) is an instance of `cls` (input 1), 0 otherwise.
// Depending on the TypeCheckKind this is an inline class-pointer comparison
// (possibly looping over the superclass chain or checking the component
// type) or a branch into TypeCheckSlowPathMIPS64.
void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  Mips64Label done;
  SlowPathCodeMIPS64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Classes must be equal for the instanceof to succeed.
      // out = (out ^ cls) == 0, i.e. 1 iff the class pointers are identical.
      __ Xor(out, out, cls);
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ Bnec(out, cls, &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop, success;
      __ Bind(&loop);
      __ Beqc(out, cls, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ Bnezc(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      Mips64Label success;
      __ Beqc(out, cls, &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      // out = 1 iff primitive_type == kPrimNot (i.e. a reference array).
      __ Sltiu(out, out, 1);
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bnec(out, cls, slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
5729
5730void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005731 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005732 locations->SetOut(Location::ConstantLocation(constant));
5733}
5734
void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant is materialized at each use site.
  // Will be generated at use site.
}
5738
5739void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005740 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005741 locations->SetOut(Location::ConstantLocation(constant));
5742}
5743
void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // No code emitted here: the constant is materialized at each use site.
  // Will be generated at use site.
}
5747
// Location setup for invokes whose target could not be resolved at compile
// time; these go through a runtime trampoline.
void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
5754
void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // Emit the call to the shared unresolved-invoke runtime trampoline.
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
5758
Alexey Frunze4dda3372015-06-01 18:31:49 -07005759void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
5760 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
5761 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
5762}
5763
// Location setup for interface calls: regular invoke locations plus a fixed
// temp for the trampoline's hidden argument.
void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // The register T0 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
}
5770
// Emits an interface call: loads the receiver's class, fetches the IMT
// entry for this invoke's IMT index, and calls through T9. The hidden
// argument (the dex method index) is placed in the second temp register
// beforehand.
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    // Receiver spilled: reload it first, then its class pointer.
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
5812
5813void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07005814 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5815 if (intrinsic.TryDispatch(invoke)) {
5816 return;
5817 }
5818
Alexey Frunze4dda3372015-06-01 18:31:49 -07005819 HandleInvoke(invoke);
5820}
5821
5822void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005823 // Explicit clinit checks triggered by static invokes must have been pruned by
5824 // art::PrepareForRegisterAllocation.
5825 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005826
Chris Larsen3039e382015-08-26 07:54:08 -07005827 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5828 if (intrinsic.TryDispatch(invoke)) {
5829 return;
5830 }
5831
Alexey Frunze4dda3372015-06-01 18:31:49 -07005832 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005833}
5834
// Polymorphic (method-handle) invokes use the generic invoke layout.
void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}
5838
void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  // Emit the call via the shared polymorphic-invoke helper.
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
5842
Chris Larsen3039e382015-08-26 07:54:08 -07005843static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005844 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07005845 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
5846 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005847 return true;
5848 }
5849 return false;
5850}
5851
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005852HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
Alexey Frunzef63f5692016-12-13 17:43:11 -08005853 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005854 bool fallback_load = false;
5855 switch (desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005856 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005857 case HLoadString::LoadKind::kBootImageInternTable:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005858 case HLoadString::LoadKind::kBssEntry:
5859 DCHECK(!Runtime::Current()->UseJitCompilation());
5860 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005861 case HLoadString::LoadKind::kJitTableAddress:
5862 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005863 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005864 case HLoadString::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005865 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko764d4542017-05-16 10:31:41 +01005866 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005867 }
5868 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005869 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005870 }
5871 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005872}
5873
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005874HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
5875 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005876 bool fallback_load = false;
5877 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005878 case HLoadClass::LoadKind::kInvalid:
5879 LOG(FATAL) << "UNREACHABLE";
5880 UNREACHABLE();
Alexey Frunzef63f5692016-12-13 17:43:11 -08005881 case HLoadClass::LoadKind::kReferrersClass:
5882 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005883 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005884 case HLoadClass::LoadKind::kBootImageClassTable:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005885 case HLoadClass::LoadKind::kBssEntry:
5886 DCHECK(!Runtime::Current()->UseJitCompilation());
5887 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005888 case HLoadClass::LoadKind::kJitTableAddress:
5889 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005890 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005891 case HLoadClass::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005892 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005893 break;
5894 }
5895 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005896 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005897 }
5898 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005899}
5900
// Returns the dispatch info this code generator supports for a
// static/direct invoke; the requested info is returned unchanged.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On MIPS64 we support all dispatch types.
  return desired_dispatch_info;
}
5907
// Emits the method-load step and the call itself for a static or direct
// invoke. `temp` receives the callee ArtMethod* for all load kinds except
// kRecursive (the current method is reused) and kRuntimeCall (the runtime
// performs the call, so this function returns early).
void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();

  // First step: materialize the callee method (or its entry point) per the
  // resolved method-load kind.
  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Self-call: the current method is already available as an input.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      // Two linker patches (high/low halves) resolve the method's boot image
      // address; 0x5678 is a placeholder rewritten at link time.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // The method address is known; load it from the literal pool.
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadDoubleword,
                     DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the method pointer from its .bss entry via a PC-relative
      // high/low patch pair; 0x5678 is a placeholder rewritten at link time.
      PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
      PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  // Second step: emit the call itself.
  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Recursive call: branch directly to this method's frame entry.
      __ Balc(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9()
      __ Jalr(T9);
      __ Nop();
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
5979
5980void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005981 // Explicit clinit checks triggered by static invokes must have been pruned by
5982 // art::PrepareForRegisterAllocation.
5983 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005984
5985 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
5986 return;
5987 }
5988
5989 LocationSummary* locations = invoke->GetLocations();
5990 codegen_->GenerateStaticOrDirectCall(invoke,
5991 locations->HasTemps()
5992 ? locations->GetTemp(0)
5993 : Location::NoLocation());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005994}
5995
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005996void CodeGeneratorMIPS64::GenerateVirtualCall(
5997 HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00005998 // Use the calling convention instead of the location of the receiver, as
5999 // intrinsics may have put the receiver in a different register. In the intrinsics
6000 // slow path, the arguments have been moved to the right place, so here we are
6001 // guaranteed that the receiver is the first register of the calling convention.
6002 InvokeDexCallingConvention calling_convention;
6003 GpuRegister receiver = calling_convention.GetRegisterAt(0);
6004
Alexey Frunze53afca12015-11-05 16:34:23 -08006005 GpuRegister temp = temp_location.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006006 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
6007 invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
6008 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Andreas Gampe542451c2016-07-26 09:02:02 -07006009 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006010
6011 // temp = object->GetClass();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00006012 __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
Alexey Frunze53afca12015-11-05 16:34:23 -08006013 MaybeRecordImplicitNullCheck(invoke);
Alexey Frunzec061de12017-02-14 13:27:23 -08006014 // Instead of simply (possibly) unpoisoning `temp` here, we should
6015 // emit a read barrier for the previous class reference load.
6016 // However this is not required in practice, as this is an
6017 // intermediate/temporary reference and because the current
6018 // concurrent copying collector keeps the from-space memory
6019 // intact/accessible until the end of the marking phase (the
6020 // concurrent copying collector may not in the future).
6021 __ MaybeUnpoisonHeapReference(temp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006022 // temp = temp->GetMethodAt(method_offset);
6023 __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
6024 // T9 = temp->GetEntryPoint();
6025 __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
6026 // T9();
6027 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07006028 __ Nop();
Vladimir Markoe7197bf2017-06-02 17:00:23 +01006029 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Alexey Frunze53afca12015-11-05 16:34:23 -08006030}
6031
6032void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
6033 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
6034 return;
6035 }
6036
6037 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006038 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006039}
6040
6041void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00006042 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006043 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006044 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006045 Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6046 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006047 return;
6048 }
Vladimir Marko41559982017-01-06 14:04:23 +00006049 DCHECK(!cls->NeedsAccessCheck());
Alexey Frunzef63f5692016-12-13 17:43:11 -08006050
Alexey Frunze15958152017-02-09 19:08:30 -08006051 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
6052 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Alexey Frunzef63f5692016-12-13 17:43:11 -08006053 ? LocationSummary::kCallOnSlowPath
6054 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01006055 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07006056 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
6057 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
6058 }
Vladimir Marko41559982017-01-06 14:04:23 +00006059 if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006060 locations->SetInAt(0, Location::RequiresRegister());
6061 }
6062 locations->SetOut(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07006063 if (load_kind == HLoadClass::LoadKind::kBssEntry) {
6064 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6065 // Rely on the type resolution or initialization and marking to save everything we need.
6066 RegisterSet caller_saves = RegisterSet::Empty();
6067 InvokeRuntimeCallingConvention calling_convention;
6068 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6069 locations->SetCustomSlowPathCallerSaves(caller_saves);
6070 } else {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006071 // For non-Baker read barriers we have a temp-clobbering call.
Alexey Frunzec61c0762017-04-10 13:54:23 -07006072 }
6073 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006074}
6075
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006076// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6077// move.
6078void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00006079 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006080 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00006081 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01006082 return;
6083 }
Vladimir Marko41559982017-01-06 14:04:23 +00006084 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01006085
Vladimir Marko41559982017-01-06 14:04:23 +00006086 LocationSummary* locations = cls->GetLocations();
Alexey Frunzef63f5692016-12-13 17:43:11 -08006087 Location out_loc = locations->Out();
6088 GpuRegister out = out_loc.AsRegister<GpuRegister>();
6089 GpuRegister current_method_reg = ZERO;
6090 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006091 load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006092 current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
6093 }
6094
Alexey Frunze15958152017-02-09 19:08:30 -08006095 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
6096 ? kWithoutReadBarrier
6097 : kCompilerReadBarrierOption;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006098 bool generate_null_check = false;
6099 switch (load_kind) {
6100 case HLoadClass::LoadKind::kReferrersClass:
6101 DCHECK(!cls->CanCallRuntime());
6102 DCHECK(!cls->MustGenerateClinitCheck());
6103 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
6104 GenerateGcRootFieldLoad(cls,
6105 out_loc,
6106 current_method_reg,
Alexey Frunze15958152017-02-09 19:08:30 -08006107 ArtMethod::DeclaringClassOffset().Int32Value(),
6108 read_barrier_option);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006109 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006110 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006111 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze15958152017-02-09 19:08:30 -08006112 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006113 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Alexey Frunzef63f5692016-12-13 17:43:11 -08006114 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006115 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6116 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
6117 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006118 __ Daddiu(out, AT, /* placeholder */ 0x5678);
6119 break;
6120 }
6121 case HLoadClass::LoadKind::kBootImageAddress: {
Alexey Frunze15958152017-02-09 19:08:30 -08006122 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006123 uint32_t address = dchecked_integral_cast<uint32_t>(
6124 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
6125 DCHECK_NE(address, 0u);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006126 __ LoadLiteral(out,
6127 kLoadUnsignedWord,
6128 codegen_->DeduplicateBootImageAddressLiteral(address));
6129 break;
6130 }
Vladimir Marko94ec2db2017-09-06 17:21:03 +01006131 case HLoadClass::LoadKind::kBootImageClassTable: {
6132 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6133 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
6134 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
6135 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6136 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
6137 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
6138 __ Lwu(out, AT, /* placeholder */ 0x5678);
6139 // Extract the reference from the slot data, i.e. clear the hash bits.
6140 int32_t masked_hash = ClassTable::TableSlot::MaskHash(
6141 ComputeModifiedUtf8Hash(cls->GetDexFile().StringByTypeIdx(cls->GetTypeIndex())));
6142 if (masked_hash != 0) {
6143 __ Daddiu(out, out, -masked_hash);
6144 }
6145 break;
6146 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006147 case HLoadClass::LoadKind::kBssEntry: {
Vladimir Markof3c52b42017-11-17 17:32:12 +00006148 CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high =
6149 codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006150 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6151 codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
Vladimir Markof3c52b42017-11-17 17:32:12 +00006152 codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high, out);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006153 GenerateGcRootFieldLoad(cls,
6154 out_loc,
Vladimir Markof3c52b42017-11-17 17:32:12 +00006155 out,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006156 /* placeholder */ 0x5678,
6157 read_barrier_option,
6158 &info_low->label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006159 generate_null_check = true;
6160 break;
6161 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08006162 case HLoadClass::LoadKind::kJitTableAddress:
6163 __ LoadLiteral(out,
6164 kLoadUnsignedWord,
6165 codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
6166 cls->GetTypeIndex(),
6167 cls->GetClass()));
Alexey Frunze15958152017-02-09 19:08:30 -08006168 GenerateGcRootFieldLoad(cls, out_loc, out, 0, read_barrier_option);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006169 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006170 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006171 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00006172 LOG(FATAL) << "UNREACHABLE";
6173 UNREACHABLE();
Alexey Frunzef63f5692016-12-13 17:43:11 -08006174 }
6175
6176 if (generate_null_check || cls->MustGenerateClinitCheck()) {
6177 DCHECK(cls->CanCallRuntime());
Vladimir Marko174b2e22017-10-12 13:34:49 +01006178 SlowPathCodeMIPS64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(
Vladimir Markof3c52b42017-11-17 17:32:12 +00006179 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
Alexey Frunzef63f5692016-12-13 17:43:11 -08006180 codegen_->AddSlowPath(slow_path);
6181 if (generate_null_check) {
6182 __ Beqzc(out, slow_path->GetEntryLabel());
6183 }
6184 if (cls->MustGenerateClinitCheck()) {
6185 GenerateClassInitializationCheck(slow_path, out);
6186 } else {
6187 __ Bind(slow_path->GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006188 }
6189 }
6190}
6191
David Brazdilcb1c0552015-08-04 16:22:25 +01006192static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006193 return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
David Brazdilcb1c0552015-08-04 16:22:25 +01006194}
6195
Alexey Frunze4dda3372015-06-01 18:31:49 -07006196void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
6197 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006198 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006199 locations->SetOut(Location::RequiresRegister());
6200}
6201
6202void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
6203 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
David Brazdilcb1c0552015-08-04 16:22:25 +01006204 __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
6205}
6206
6207void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006208 new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
David Brazdilcb1c0552015-08-04 16:22:25 +01006209}
6210
6211void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
6212 __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006213}
6214
Alexey Frunze4dda3372015-06-01 18:31:49 -07006215void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006216 HLoadString::LoadKind load_kind = load->GetLoadKind();
6217 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006218 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006219 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006220 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006221 locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Alexey Frunzef63f5692016-12-13 17:43:11 -08006222 } else {
6223 locations->SetOut(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07006224 if (load_kind == HLoadString::LoadKind::kBssEntry) {
6225 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6226 // Rely on the pResolveString and marking to save everything we need.
6227 RegisterSet caller_saves = RegisterSet::Empty();
6228 InvokeRuntimeCallingConvention calling_convention;
6229 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6230 locations->SetCustomSlowPathCallerSaves(caller_saves);
6231 } else {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006232 // For non-Baker read barriers we have a temp-clobbering call.
Alexey Frunzec61c0762017-04-10 13:54:23 -07006233 }
6234 }
Alexey Frunzef63f5692016-12-13 17:43:11 -08006235 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006236}
6237
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006238// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6239// move.
6240void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006241 HLoadString::LoadKind load_kind = load->GetLoadKind();
6242 LocationSummary* locations = load->GetLocations();
6243 Location out_loc = locations->Out();
6244 GpuRegister out = out_loc.AsRegister<GpuRegister>();
6245
6246 switch (load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006247 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
6248 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006249 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006250 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006251 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6252 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
6253 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006254 __ Daddiu(out, AT, /* placeholder */ 0x5678);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006255 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006256 }
6257 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006258 uint32_t address = dchecked_integral_cast<uint32_t>(
6259 reinterpret_cast<uintptr_t>(load->GetString().Get()));
6260 DCHECK_NE(address, 0u);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006261 __ LoadLiteral(out,
6262 kLoadUnsignedWord,
6263 codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006264 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006265 }
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006266 case HLoadString::LoadKind::kBootImageInternTable: {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006267 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006268 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006269 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006270 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6271 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006272 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
6273 __ Lwu(out, AT, /* placeholder */ 0x5678);
6274 return;
6275 }
6276 case HLoadString::LoadKind::kBssEntry: {
6277 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6278 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
6279 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
6280 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6281 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Vladimir Markof3c52b42017-11-17 17:32:12 +00006282 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, out);
Alexey Frunze15958152017-02-09 19:08:30 -08006283 GenerateGcRootFieldLoad(load,
6284 out_loc,
Vladimir Markof3c52b42017-11-17 17:32:12 +00006285 out,
Alexey Frunze15958152017-02-09 19:08:30 -08006286 /* placeholder */ 0x5678,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006287 kCompilerReadBarrierOption,
6288 &info_low->label);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006289 SlowPathCodeMIPS64* slow_path =
Vladimir Markof3c52b42017-11-17 17:32:12 +00006290 new (codegen_->GetScopedAllocator()) LoadStringSlowPathMIPS64(load);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006291 codegen_->AddSlowPath(slow_path);
6292 __ Beqzc(out, slow_path->GetEntryLabel());
6293 __ Bind(slow_path->GetExitLabel());
6294 return;
6295 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08006296 case HLoadString::LoadKind::kJitTableAddress:
6297 __ LoadLiteral(out,
6298 kLoadUnsignedWord,
6299 codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
6300 load->GetStringIndex(),
6301 load->GetString()));
Alexey Frunze15958152017-02-09 19:08:30 -08006302 GenerateGcRootFieldLoad(load, out_loc, out, 0, kCompilerReadBarrierOption);
Alexey Frunze627c1a02017-01-30 19:28:14 -08006303 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006304 default:
6305 break;
6306 }
6307
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006308 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006309 DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006310 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006311 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006312 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
6313 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6314 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006315}
6316
Alexey Frunze4dda3372015-06-01 18:31:49 -07006317void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006318 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006319 locations->SetOut(Location::ConstantLocation(constant));
6320}
6321
6322void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
6323 // Will be generated at use site.
6324}
6325
6326void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006327 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6328 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006329 InvokeRuntimeCallingConvention calling_convention;
6330 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6331}
6332
6333void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006334 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07006335 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01006336 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006337 if (instruction->IsEnter()) {
6338 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6339 } else {
6340 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6341 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006342}
6343
6344void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
6345 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006346 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006347 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006348 case DataType::Type::kInt32:
6349 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006350 locations->SetInAt(0, Location::RequiresRegister());
6351 locations->SetInAt(1, Location::RequiresRegister());
6352 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6353 break;
6354
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006355 case DataType::Type::kFloat32:
6356 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006357 locations->SetInAt(0, Location::RequiresFpuRegister());
6358 locations->SetInAt(1, Location::RequiresFpuRegister());
6359 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6360 break;
6361
6362 default:
6363 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
6364 }
6365}
6366
6367void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006368 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006369 LocationSummary* locations = instruction->GetLocations();
6370
6371 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006372 case DataType::Type::kInt32:
6373 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006374 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6375 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
6376 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006377 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006378 __ MulR6(dst, lhs, rhs);
6379 else
6380 __ Dmul(dst, lhs, rhs);
6381 break;
6382 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006383 case DataType::Type::kFloat32:
6384 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006385 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6386 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
6387 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006388 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006389 __ MulS(dst, lhs, rhs);
6390 else
6391 __ MulD(dst, lhs, rhs);
6392 break;
6393 }
6394 default:
6395 LOG(FATAL) << "Unexpected mul type " << type;
6396 }
6397}
6398
6399void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
6400 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006401 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006402 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006403 case DataType::Type::kInt32:
6404 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006405 locations->SetInAt(0, Location::RequiresRegister());
6406 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6407 break;
6408
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006409 case DataType::Type::kFloat32:
6410 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006411 locations->SetInAt(0, Location::RequiresFpuRegister());
6412 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6413 break;
6414
6415 default:
6416 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
6417 }
6418}
6419
6420void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006421 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006422 LocationSummary* locations = instruction->GetLocations();
6423
6424 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006425 case DataType::Type::kInt32:
6426 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006427 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6428 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006429 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006430 __ Subu(dst, ZERO, src);
6431 else
6432 __ Dsubu(dst, ZERO, src);
6433 break;
6434 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006435 case DataType::Type::kFloat32:
6436 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006437 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6438 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006439 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006440 __ NegS(dst, src);
6441 else
6442 __ NegD(dst, src);
6443 break;
6444 }
6445 default:
6446 LOG(FATAL) << "Unexpected neg type " << type;
6447 }
6448}
6449
6450void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006451 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6452 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006453 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006454 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006455 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6456 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006457}
6458
6459void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08006460 // Note: if heap poisoning is enabled, the entry point takes care
6461 // of poisoning the reference.
Goran Jakovljevic854df412017-06-27 14:41:39 +02006462 QuickEntrypointEnum entrypoint =
6463 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
6464 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006465 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Goran Jakovljevic854df412017-06-27 14:41:39 +02006466 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006467}
6468
// Builds locations for an object allocation (runtime call on the main path).
void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    // Strings go through StringFactory; reserve the method register as a temp
    // for loading the factory's ArtMethod* (see the code generator below).
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    // Input 0: the class to instantiate, in the first runtime argument register.
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  }
  // The new object reference is produced in the runtime's return register.
  locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
}
6480
// Emits the runtime call that allocates an object instance.
void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
    MemberOffset code_offset =
        ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
    // Load the NewEmptyString ArtMethod* from the thread register (TR), then
    // its quick-compiled entry point, and call through T9.
    __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    // Fills the jalr delay slot.
    __ Nop();
    // Record a stack map for the direct call (InvokeRuntime is not used here).
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
6499
// Bitwise NOT needs only a core input register and a core output register.
void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  // The output may reuse the input register; no overlap is required.
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
6505
6506void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006507 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006508 LocationSummary* locations = instruction->GetLocations();
6509
6510 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006511 case DataType::Type::kInt32:
6512 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006513 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6514 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6515 __ Nor(dst, src, ZERO);
6516 break;
6517 }
6518
6519 default:
6520 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
6521 }
6522}
6523
// Boolean negation: a core register in, a core register out.
void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  // Booleans are materialized as 0/1, so logical negation is XOR with 1.
  __ Xori(locations->Out().AsRegister<GpuRegister>(),
          locations->InAt(0).AsRegister<GpuRegister>(),
          1);
}
6536
// Null checks: the object to test is the single input; throwing-slow-path
// locations are created by the shared CodeGenerator helper.
void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}

// Implicit null check: a load from the object's address, so a null reference
// faults and the runtime turns the fault into a NullPointerException.
void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  if (CanMoveNullCheckToUser(instruction)) {
    // A user instruction's own memory access will perform the check.
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Load (and discard into ZERO) from offset 0 of the object.
  __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
  RecordPcInfo(instruction, instruction->GetDexPc());
}

// Explicit null check: compare against zero and branch to a throwing slow path.
void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeMIPS64* slow_path =
      new (GetScopedAllocator()) NullCheckSlowPathMIPS64(instruction);
  AddSlowPath(slow_path);

  Location obj = instruction->GetLocations()->InAt(0);

  __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
  // Dispatches to the implicit or explicit variant above.
  codegen_->GenerateNullCheck(instruction);
}
6565
// Bitwise OR shares location building and code generation with the other
// binary operations.
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

// HParallelMove never goes through the locations builder.
void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
6577
// Emits a parallel move via the move resolver. If the move immediately
// precedes a loop's suspend check, loop-phi spill slots are first cleared
// from that suspend check's stack map.
void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
  if (instruction->GetNext()->IsSuspendCheck() &&
      instruction->GetBlock()->GetLoopInformation() != nullptr) {
    HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
    // The back edge will generate the suspend check.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
  }

  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
6588
// Method parameters already live where the calling convention put them;
// stack-passed parameters are rebased past this method's own frame.
void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    // The slot is in the caller's frame, so add this method's frame size.
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
6604
// The current ArtMethod* is pinned to the method register by the calling
// convention, so no code is emitted for it.
void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}

void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
                                                        ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

// Phis place no constraints on their operands; the register allocator
// resolves them, so code generation for a phi is unreachable.
void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
6627
// Builds locations for remainder: integral rem is computed inline, while
// floating-point rem is a runtime call (fmodf/fmod), hence kCallOnMainOnly.
void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
  DataType::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
                                          : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      // The divisor may be encoded as an immediate when it is a constant.
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      // Arguments and result follow the runtime calling convention.
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
6656
// Emits remainder: shared div/rem lowering for integers, fmodf/fmod runtime
// calls for floating point.
void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
  DataType::Type type = instruction->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      // Division and remainder share one lowering helper.
      GenerateDivRemIntegral(instruction);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      QuickEntrypointEnum entrypoint =
          (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
      codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
      if (type == DataType::Type::kFloat32) {
        CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      } else {
        CheckEntrypointTypes<kQuickFmod, double, double, double>();
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
6682
// A constructor fence needs no registers; it lowers to a store-store barrier.
void LocationsBuilderMIPS64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}

// Generic memory barriers likewise need no registers; the barrier kind is
// taken from the instruction.
void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
6699
// Return: the value must be in the conventional return location for its type;
// the code generation itself is just the frame exit sequence.
void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(ret);
  DataType::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, Mips64ReturnLocation(return_type));
}

void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}

// Void return: no locations, only the frame exit.
void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
6717
// Rotates and shifts share HandleShift(); subtraction shares HandleBinaryOp().
void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
  HandleShift(ror);
}

void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}
6749
// Static field accesses share the instance-field helpers.
void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // GetValueCanBeNull() lets the helper skip the null filter in the write
  // barrier when the stored value is known non-null.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
6765
// Accesses to fields that could not be resolved at compile time are lowered
// to runtime calls via the shared CodeGenerator helpers; all four variants
// (instance/static x get/set) follow the same pattern.
void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
6833
// Suspend checks call the runtime only on the slow path.
void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}
6843
// Emits a suspend check, unless another construct (loop back edge or the
// entry block's goto) is responsible for emitting it.
void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
6857
// Throw: the exception object goes in the first runtime argument register,
// and code generation is a call to the DeliverException entrypoint.
void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
6869
6870void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006871 DataType::Type input_type = conversion->GetInputType();
6872 DataType::Type result_type = conversion->GetResultType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01006873 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
6874 << input_type << " -> " << result_type;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006875
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006876 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
6877 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006878 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
6879 }
6880
Vladimir Markoca6fff82017-10-03 14:49:14 +01006881 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(conversion);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006882
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006883 if (DataType::IsFloatingPointType(input_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006884 locations->SetInAt(0, Location::RequiresFpuRegister());
6885 } else {
6886 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006887 }
6888
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006889 if (DataType::IsFloatingPointType(result_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006890 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006891 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006892 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006893 }
6894}
6895
// Emits a primitive type conversion. Four families are handled:
// integral->integral (mask or sign-extend), integral->FP (move to FPU and
// convert), FP->integral (truncate then move to GPR), and FP->FP (convert).
void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();

  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case DataType::Type::kUint8:
        // Zero-extend to 8 bits.
        __ Andi(dst, src, 0xFF);
        break;
      case DataType::Type::kInt8:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          __ Seb(dst, src);
        }
        break;
      case DataType::Type::kUint16:
        // Zero-extend to 16 bits.
        __ Andi(dst, src, 0xFFFF);
        break;
      case DataType::Type::kInt16:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          __ Seh(dst, src);
        }
        break;
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
        // conversions, except when the input and output registers are the same and we are not
        // converting longs to shorter types. In these cases, do nothing.
        if ((input_type == DataType::Type::kInt64) || (dst != src)) {
          __ Sll(dst, src, 0);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
    // int/long -> float/double: move through the FTMP scratch FPU register,
    // then convert from word (Cvt*w) or longword (Cvt*l).
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    if (input_type == DataType::Type::kInt64) {
      __ Dmtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      __ Mtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
    CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
    // float/double -> int/long: truncate into FTMP, then move to the GPR.
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();

    if (result_type == DataType::Type::kInt64) {
      if (input_type == DataType::Type::kFloat32) {
        __ TruncLS(FTMP, src);
      } else {
        __ TruncLD(FTMP, src);
      }
      __ Dmfc1(dst, FTMP);
    } else {
      if (input_type == DataType::Type::kFloat32) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);
    }
  } else if (DataType::IsFloatingPointType(result_type) &&
             DataType::IsFloatingPointType(input_type)) {
    // float <-> double conversion.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    if (result_type == DataType::Type::kFloat32) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
7003
// Unsigned shift shares HandleShift(); XOR shares HandleBinaryOp().
void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
7029
7030void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007031 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007032}
7033
7034void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007035 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007036}
7037
7038void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007039 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007040}
7041
7042void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007043 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007044}
7045
7046void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007047 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007048}
7049
7050void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007051 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007052}
7053
7054void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007055 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007056}
7057
7058void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007059 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007060}
7061
7062void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007063 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007064}
7065
7066void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007067 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007068}
7069
7070void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007071 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007072}
7073
7074void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007075 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007076}
7077
Aart Bike9f37602015-10-09 11:15:55 -07007078void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007079 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07007080}
7081
void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
  // Code emission for unsigned '<' is shared with all other condition nodes.
  HandleCondition(comp);
}
7085
void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  // Register-allocation for unsigned '<=' is shared with all other condition nodes.
  HandleCondition(comp);
}
7089
void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  // Code emission for unsigned '<=' is shared with all other condition nodes.
  HandleCondition(comp);
}
7093
void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  // Register-allocation for unsigned '>' is shared with all other condition nodes.
  HandleCondition(comp);
}
7097
void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  // Code emission for unsigned '>' is shared with all other condition nodes.
  HandleCondition(comp);
}
7101
void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  // Register-allocation for unsigned '>=' is shared with all other condition nodes.
  HandleCondition(comp);
}
7105
void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  // Code emission for unsigned '>=' is shared with all other condition nodes.
  HandleCondition(comp);
}
7109
Mark Mendellfe57faa2015-09-18 09:26:15 -04007110// Simple implementation of packed switch - generate cascaded compare/jumps.
7111void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7112 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007113 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04007114 locations->SetInAt(0, Location::RequiresRegister());
7115}
7116
// Lowers a packed switch as a cascade of compare/branch instructions: the
// selector is biased by -lower_bound and then repeatedly decremented, so each
// case check is a cheap compare against zero (two cases handled per loop
// iteration).  Used for small switches; larger ones go through
// GenTableBasedPackedSwitch.
void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
                                                                 int32_t lower_bound,
                                                                 uint32_t num_entries,
                                                                 HBasicBlock* switch_block,
                                                                 HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  // temp_reg = value - lower_bound, i.e. the zero-based case index.
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  // Handle two case values per iteration: decrement the index by 2, then a
  // negative result selects the odd case and zero selects the even one.
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
7152
// Lowers a packed switch through an in-code jump table: the table holds one
// 32-bit offset per case (relative to the table start); the biased selector
// indexes the table, and the loaded offset is added to the table address to
// form the branch target.
void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<Mips64Label*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  // TMP = zero-based case index; an unsigned compare also catches negative
  // (biased) values, so a single branch handles both out-of-range sides.
  __ Addiu32(TMP, value_reg, -lower_bound);
  __ LoadConst32(AT, num_entries);
  __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, table->GetLabel());
  // Scale the index by 4 (each table entry is a 4-byte offset) and add the
  // table base address.
  __ Dlsa(TMP, TMP, AT, 2);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Daddu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ Nop();
}
7183
7184void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7185 int32_t lower_bound = switch_instr->GetStartValue();
7186 uint32_t num_entries = switch_instr->GetNumEntries();
7187 LocationSummary* locations = switch_instr->GetLocations();
7188 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
7189 HBasicBlock* switch_block = switch_instr->GetBlock();
7190 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7191
7192 if (num_entries > kPackedSwitchJumpTableThreshold) {
7193 GenTableBasedPackedSwitch(value_reg,
7194 lower_bound,
7195 num_entries,
7196 switch_block,
7197 default_block);
7198 } else {
7199 GenPackedSwitchWithCompares(value_reg,
7200 lower_bound,
7201 num_entries,
7202 switch_block,
7203 default_block);
7204 }
7205}
7206
Chris Larsenc9905a62017-03-13 17:06:18 -07007207void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
7208 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007209 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Chris Larsenc9905a62017-03-13 17:06:18 -07007210 locations->SetInAt(0, Location::RequiresRegister());
7211 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00007212}
7213
Chris Larsenc9905a62017-03-13 17:06:18 -07007214void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
7215 LocationSummary* locations = instruction->GetLocations();
7216 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
7217 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
7218 instruction->GetIndex(), kMips64PointerSize).SizeValue();
7219 __ LoadFromOffset(kLoadDoubleword,
7220 locations->Out().AsRegister<GpuRegister>(),
7221 locations->InAt(0).AsRegister<GpuRegister>(),
7222 method_offset);
7223 } else {
7224 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
7225 instruction->GetIndex(), kMips64PointerSize));
7226 __ LoadFromOffset(kLoadDoubleword,
7227 locations->Out().AsRegister<GpuRegister>(),
7228 locations->InAt(0).AsRegister<GpuRegister>(),
7229 mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
7230 __ LoadFromOffset(kLoadDoubleword,
7231 locations->Out().AsRegister<GpuRegister>(),
7232 locations->Out().AsRegister<GpuRegister>(),
7233 method_offset);
7234 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00007235}
7236
void LocationsBuilderMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                      ATTRIBUTE_UNUSED) {
  // HIntermediateAddress nodes are never created for this backend, so
  // reaching this visitor indicates a compiler bug.
  LOG(FATAL) << "Unreachable";
}
7241
void InstructionCodeGeneratorMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                              ATTRIBUTE_UNUSED) {
  // HIntermediateAddress nodes are never created for this backend, so
  // reaching this visitor indicates a compiler bug.
  LOG(FATAL) << "Unreachable";
}
7246
Alexey Frunze4dda3372015-06-01 18:31:49 -07007247} // namespace mips64
7248} // namespace art