blob: 08a6512febc36e11342af720aaceefe445e80ab6 [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunze4147fcc2017-06-17 19:57:27 -070019#include "arch/mips64/asm_support_mips64.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070020#include "art_method.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010021#include "class_table.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070022#include "code_generator_utils.h"
Alexey Frunze19f6c692016-11-30 19:19:55 -080023#include "compiled_method.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070024#include "entrypoints/quick/quick_entrypoints.h"
25#include "entrypoints/quick/quick_entrypoints_enum.h"
26#include "gc/accounting/card_table.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070027#include "heap_poisoning.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070028#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070029#include "intrinsics_mips64.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010030#include "linker/linker_patch.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070031#include "mirror/array-inl.h"
32#include "mirror/class-inl.h"
33#include "offsets.h"
Vladimir Marko174b2e22017-10-12 13:34:49 +010034#include "stack_map_stream.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070035#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070036#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070037#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070038#include "utils/stack_checks.h"
39
40namespace art {
41namespace mips64 {
42
// Offset (in bytes) of the ArtMethod* slot within the current frame.
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry, per the managed calling convention.
static constexpr GpuRegister kMethodRegisterArgument = A0;

// Flags controlling the use of thunks for Baker read barriers.
// Each flag independently enables the thunk-based fast path for one
// category of reference load (field get, array get, GC-root load).
constexpr bool kBakerReadBarrierThunksEnableForFields = true;
constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010051Location Mips64ReturnLocation(DataType::Type return_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070052 switch (return_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010053 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +010054 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010055 case DataType::Type::kInt8:
56 case DataType::Type::kUint16:
57 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -080058 case DataType::Type::kUint32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010059 case DataType::Type::kInt32:
60 case DataType::Type::kReference:
Aart Bik66c158e2018-01-31 12:55:04 -080061 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010062 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070063 return Location::RegisterLocation(V0);
64
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010065 case DataType::Type::kFloat32:
66 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070067 return Location::FpuRegisterLocation(F0);
68
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010069 case DataType::Type::kVoid:
Alexey Frunze4dda3372015-06-01 18:31:49 -070070 return Location();
71 }
72 UNREACHABLE();
73}
74
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010075Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(DataType::Type type) const {
Alexey Frunze4dda3372015-06-01 18:31:49 -070076 return Mips64ReturnLocation(type);
77}
78
79Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
80 return Location::RegisterLocation(kMethodRegisterArgument);
81}
82
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010083Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(DataType::Type type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070084 Location next_location;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010085 if (type == DataType::Type::kVoid) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070086 LOG(FATAL) << "Unexpected parameter type " << type;
87 }
88
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010089 if (DataType::IsFloatingPointType(type) &&
Alexey Frunze4dda3372015-06-01 18:31:49 -070090 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
91 next_location = Location::FpuRegisterLocation(
92 calling_convention.GetFpuRegisterAt(float_index_++));
93 gp_index_++;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010094 } else if (!DataType::IsFloatingPointType(type) &&
Alexey Frunze4dda3372015-06-01 18:31:49 -070095 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
96 next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
97 float_index_++;
98 } else {
99 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100100 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
101 : Location::StackSlot(stack_offset);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700102 }
103
104 // Space on the stack is reserved for all arguments.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100105 stack_index_ += DataType::Is64BitType(type) ? 2 : 1;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700106
Alexey Frunze4dda3372015-06-01 18:31:49 -0700107 return next_location;
108}
109
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100110Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700111 return Mips64ReturnLocation(type);
112}
113
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
// `__` is the conventional assembler shorthand used by the slow paths and
// visitors below. It expands the identifier `codegen`, so any user must
// have a `CodeGenerator* codegen` in scope.
#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
// Byte offset of quick entrypoint `x` inside the Thread object, for MIPS64.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700117
118class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
119 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000120 explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700121
122 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100123 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700124 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
125 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000126 if (instruction_->CanThrowIntoCatchBlock()) {
127 // Live registers will be restored in the catch block if caught.
128 SaveLiveRegisters(codegen, instruction_->GetLocations());
129 }
Alexey Frunze4dda3372015-06-01 18:31:49 -0700130 // We're moving two locations to locations that could overlap, so we need a parallel
131 // move resolver.
132 InvokeRuntimeCallingConvention calling_convention;
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100133 codegen->EmitParallelMoves(locations->InAt(0),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700134 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100135 DataType::Type::kInt32,
Serban Constantinescu5a6cc492015-08-13 15:20:25 +0100136 locations->InAt(1),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700137 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100138 DataType::Type::kInt32);
Serban Constantinescufc734082016-07-19 17:18:07 +0100139 QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
140 ? kQuickThrowStringBounds
141 : kQuickThrowArrayBounds;
142 mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +0100143 CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700144 CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
145 }
146
Alexandre Rames8158f282015-08-07 10:26:17 +0100147 bool IsFatal() const OVERRIDE { return true; }
148
Roland Levillain46648892015-06-19 16:07:18 +0100149 const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }
150
Alexey Frunze4dda3372015-06-01 18:31:49 -0700151 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700152 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
153};
154
155class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
156 public:
Alexey Frunzec61c0762017-04-10 13:54:23 -0700157 explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction)
158 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700159
160 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
161 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
162 __ Bind(GetEntryLabel());
Serban Constantinescufc734082016-07-19 17:18:07 +0100163 mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700164 CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
165 }
166
Alexandre Rames8158f282015-08-07 10:26:17 +0100167 bool IsFatal() const OVERRIDE { return true; }
168
Roland Levillain46648892015-06-19 16:07:18 +0100169 const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }
170
Alexey Frunze4dda3372015-06-01 18:31:49 -0700171 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700172 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
173};
174
// Slow path for HLoadClass / HClinitCheck: calls into the runtime to
// resolve the type (kQuickInitializeType) or to resolve it and run its
// static initializer (kQuickInitializeStaticStorage, when `do_clinit_`),
// then moves the result to the output location, if the instruction has one.
class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `cls` is the class to load; `at` is the instruction owning this slow
  // path (the HLoadClass itself, or an HClinitCheck); `dex_pc` is recorded
  // with the runtime call; `do_clinit` selects initialization vs. plain
  // resolution.
  LoadClassSlowPathMIPS64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCodeMIPS64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    InvokeRuntimeCallingConvention calling_convention;
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the type index as the runtime call's argument.
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      // Copy from the first calling-convention register to the
      // instruction's output location.
      mips64_codegen->MoveLocation(out,
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   type);
    }
    RestoreLiveRegisters(codegen, locations);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};
235
236class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
237 public:
Vladimir Markof3c52b42017-11-17 17:32:12 +0000238 explicit LoadStringSlowPathMIPS64(HLoadString* instruction)
239 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700240
241 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700242 DCHECK(instruction_->IsLoadString());
243 DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700244 LocationSummary* locations = instruction_->GetLocations();
245 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
Vladimir Markof3c52b42017-11-17 17:32:12 +0000246 const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700247 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700248 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700249 __ Bind(GetEntryLabel());
250 SaveLiveRegisters(codegen, locations);
251
Vladimir Marko6bec91c2017-01-09 15:03:12 +0000252 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
Serban Constantinescufc734082016-07-19 17:18:07 +0100253 mips64_codegen->InvokeRuntime(kQuickResolveString,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700254 instruction_,
255 instruction_->GetDexPc(),
256 this);
257 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunze5fa5c042017-06-01 21:07:52 -0700258
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100259 DataType::Type type = instruction_->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700260 mips64_codegen->MoveLocation(locations->Out(),
Alexey Frunzec61c0762017-04-10 13:54:23 -0700261 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Alexey Frunze4dda3372015-06-01 18:31:49 -0700262 type);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700263 RestoreLiveRegisters(codegen, locations);
Alexey Frunzef63f5692016-12-13 17:43:11 -0800264
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700265 __ Bc(GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700266 }
267
Roland Levillain46648892015-06-19 16:07:18 +0100268 const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }
269
Alexey Frunze4dda3372015-06-01 18:31:49 -0700270 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700271 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
272};
273
274class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
275 public:
David Srbecky9cd6d372016-02-09 15:24:47 +0000276 explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700277
278 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
279 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
280 __ Bind(GetEntryLabel());
David Brazdil77a48ae2015-09-15 12:34:04 +0000281 if (instruction_->CanThrowIntoCatchBlock()) {
282 // Live registers will be restored in the catch block if caught.
283 SaveLiveRegisters(codegen, instruction_->GetLocations());
284 }
Serban Constantinescufc734082016-07-19 17:18:07 +0100285 mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
Alexey Frunze4dda3372015-06-01 18:31:49 -0700286 instruction_,
287 instruction_->GetDexPc(),
288 this);
289 CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
290 }
291
Alexandre Rames8158f282015-08-07 10:26:17 +0100292 bool IsFatal() const OVERRIDE { return true; }
293
Roland Levillain46648892015-06-19 16:07:18 +0100294 const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }
295
Alexey Frunze4dda3372015-06-01 18:31:49 -0700296 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700297 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
298};
299
300class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
301 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100302 SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
David Srbecky9cd6d372016-02-09 15:24:47 +0000303 : SlowPathCodeMIPS64(instruction), successor_(successor) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700304
305 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +0200306 LocationSummary* locations = instruction_->GetLocations();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700307 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
308 __ Bind(GetEntryLabel());
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +0200309 SaveLiveRegisters(codegen, locations); // Only saves live vector registers for SIMD.
Serban Constantinescufc734082016-07-19 17:18:07 +0100310 mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700311 CheckEntrypointTypes<kQuickTestSuspend, void, void>();
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +0200312 RestoreLiveRegisters(codegen, locations); // Only restores live vector registers for SIMD.
Alexey Frunze4dda3372015-06-01 18:31:49 -0700313 if (successor_ == nullptr) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700314 __ Bc(GetReturnLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -0700315 } else {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700316 __ Bc(mips64_codegen->GetLabelOf(successor_));
Alexey Frunze4dda3372015-06-01 18:31:49 -0700317 }
318 }
319
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700320 Mips64Label* GetReturnLabel() {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700321 DCHECK(successor_ == nullptr);
322 return &return_label_;
323 }
324
Roland Levillain46648892015-06-19 16:07:18 +0100325 const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }
326
Chris Larsena2045912017-11-02 12:39:54 -0700327 HBasicBlock* GetSuccessor() const {
328 return successor_;
329 }
330
Alexey Frunze4dda3372015-06-01 18:31:49 -0700331 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700332 // If not null, the block to branch to after the suspend check.
333 HBasicBlock* const successor_;
334
335 // If `successor_` is null, the label to branch to after the suspend check.
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700336 Mips64Label return_label_;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700337
338 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
339};
340
// Slow path shared by HInstanceOf and HCheckCast. For HInstanceOf it
// calls kQuickInstanceofNonTrivial and writes the result to the output
// location; for HCheckCast it calls kQuickCheckInstanceOf (which throws
// on failure). When `is_fatal_` is set, the path never rejoins the fast
// path, so live registers are only saved if a catch block may need them.
class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    // A fatal path that cannot be caught has no use for the saved
    // registers; skip the save in that case.
    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      // Copy the boolean result to the instruction's output location.
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    // Only a non-fatal path rejoins the fast path.
    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ Bc(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // Whether this slow path can return to the fast path (false) or always
  // ends in a throw (true).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};
395
396class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
397 public:
Aart Bik42249c32016-01-07 15:33:50 -0800398 explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
David Srbecky9cd6d372016-02-09 15:24:47 +0000399 : SlowPathCodeMIPS64(instruction) {}
Alexey Frunze4dda3372015-06-01 18:31:49 -0700400
401 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
Aart Bik42249c32016-01-07 15:33:50 -0800402 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700403 __ Bind(GetEntryLabel());
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100404 LocationSummary* locations = instruction_->GetLocations();
405 SaveLiveRegisters(codegen, locations);
406 InvokeRuntimeCallingConvention calling_convention;
407 __ LoadConst32(calling_convention.GetRegisterAt(0),
408 static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
Serban Constantinescufc734082016-07-19 17:18:07 +0100409 mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100410 CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
Alexey Frunze4dda3372015-06-01 18:31:49 -0700411 }
412
Roland Levillain46648892015-06-19 16:07:18 +0100413 const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }
414
Alexey Frunze4dda3372015-06-01 18:31:49 -0700415 private:
Alexey Frunze4dda3372015-06-01 18:31:49 -0700416 DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
417};
418
Alexey Frunze15958152017-02-09 19:08:30 -0800419class ArraySetSlowPathMIPS64 : public SlowPathCodeMIPS64 {
420 public:
421 explicit ArraySetSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}
422
423 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
424 LocationSummary* locations = instruction_->GetLocations();
425 __ Bind(GetEntryLabel());
426 SaveLiveRegisters(codegen, locations);
427
428 InvokeRuntimeCallingConvention calling_convention;
Vladimir Markoca6fff82017-10-03 14:49:14 +0100429 HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
Alexey Frunze15958152017-02-09 19:08:30 -0800430 parallel_move.AddMove(
431 locations->InAt(0),
432 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100433 DataType::Type::kReference,
Alexey Frunze15958152017-02-09 19:08:30 -0800434 nullptr);
435 parallel_move.AddMove(
436 locations->InAt(1),
437 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100438 DataType::Type::kInt32,
Alexey Frunze15958152017-02-09 19:08:30 -0800439 nullptr);
440 parallel_move.AddMove(
441 locations->InAt(2),
442 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100443 DataType::Type::kReference,
Alexey Frunze15958152017-02-09 19:08:30 -0800444 nullptr);
445 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
446
447 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
448 mips64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
449 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
450 RestoreLiveRegisters(codegen, locations);
451 __ Bc(GetExitLabel());
452 }
453
454 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS64"; }
455
456 private:
457 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS64);
458};
459
460// Slow path marking an object reference `ref` during a read
461// barrier. The field `obj.field` in the object `obj` holding this
462// reference does not get updated by this slow path after marking (see
463// ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 below for that).
464//
465// This means that after the execution of this slow path, `ref` will
466// always be up-to-date, but `obj.field` may not; i.e., after the
467// flip, `ref` will be a to-space reference, but `obj.field` will
468// probably still be a from-space reference (unless it gets updated by
469// another thread, or if another thread installed another object
470// reference (different from `ref`) in `obj.field`).
471//
472// If `entrypoint` is a valid location it is assumed to already be
473// holding the entrypoint. The case where the entrypoint is passed in
474// is for the GcRoot read barrier.
475class ReadBarrierMarkSlowPathMIPS64 : public SlowPathCodeMIPS64 {
476 public:
477 ReadBarrierMarkSlowPathMIPS64(HInstruction* instruction,
478 Location ref,
479 Location entrypoint = Location::NoLocation())
480 : SlowPathCodeMIPS64(instruction), ref_(ref), entrypoint_(entrypoint) {
481 DCHECK(kEmitCompilerReadBarrier);
482 }
483
484 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }
485
486 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
487 LocationSummary* locations = instruction_->GetLocations();
488 GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
489 DCHECK(locations->CanCall());
490 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
491 DCHECK(instruction_->IsInstanceFieldGet() ||
492 instruction_->IsStaticFieldGet() ||
493 instruction_->IsArrayGet() ||
494 instruction_->IsArraySet() ||
495 instruction_->IsLoadClass() ||
496 instruction_->IsLoadString() ||
497 instruction_->IsInstanceOf() ||
498 instruction_->IsCheckCast() ||
499 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
500 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
501 << "Unexpected instruction in read barrier marking slow path: "
502 << instruction_->DebugName();
503
504 __ Bind(GetEntryLabel());
505 // No need to save live registers; it's taken care of by the
506 // entrypoint. Also, there is no need to update the stack mask,
507 // as this runtime call will not trigger a garbage collection.
508 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
509 DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
510 (S2 <= ref_reg && ref_reg <= S7) ||
511 (ref_reg == S8)) << ref_reg;
512 // "Compact" slow path, saving two moves.
513 //
514 // Instead of using the standard runtime calling convention (input
515 // and output in A0 and V0 respectively):
516 //
517 // A0 <- ref
518 // V0 <- ReadBarrierMark(A0)
519 // ref <- V0
520 //
521 // we just use rX (the register containing `ref`) as input and output
522 // of a dedicated entrypoint:
523 //
524 // rX <- ReadBarrierMarkRegX(rX)
525 //
526 if (entrypoint_.IsValid()) {
527 mips64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
528 DCHECK_EQ(entrypoint_.AsRegister<GpuRegister>(), T9);
529 __ Jalr(entrypoint_.AsRegister<GpuRegister>());
530 __ Nop();
531 } else {
532 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100533 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800534 // This runtime call does not require a stack map.
535 mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
536 instruction_,
537 this);
538 }
539 __ Bc(GetExitLabel());
540 }
541
542 private:
543 // The location (register) of the marked object reference.
544 const Location ref_;
545
546 // The location of the entrypoint if already loaded.
547 const Location entrypoint_;
548
549 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS64);
550};
551
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathMIPS64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(HInstruction* instruction,
                                              Location ref,
                                              GpuRegister obj,
                                              Location field_offset,
                                              GpuRegister temp1)
      : SlowPathCodeMIPS64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == S8)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    // The entrypoint is selected from the register number of `ref_reg`
    // (`ref_reg - 1` indexes the per-register mark entrypoint table).
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    Mips64Label done;
    __ Beqc(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    GpuRegister base = obj_;
    GpuRegister offset = field_offset_.AsRegister<GpuRegister>();
    GpuRegister expected = temp1_;
    GpuRegister value = ref_reg;
    GpuRegister tmp_ptr = TMP;      // Pointer to actual memory.
    GpuRegister tmp = AT;           // Value in memory.

    __ Daddu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    Mips64Label loop_head, exit_loop;
    __ Bind(&loop_head);
    __ Ll(tmp, tmp_ptr);
    // The LL instruction sign-extends the 32-bit value, but
    // 32-bit references must be zero-extended. Zero-extend `tmp`.
    __ Dext(tmp, tmp, 0, 32);
    __ Bnec(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    __ Sc(tmp, tmp_ptr);
    // SC leaves 1 in `tmp` on success, 0 on failure; retry on failure.
    __ Beqzc(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ Bc(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const GpuRegister obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  // Scratch register used to preserve the old reference across the call to
  // the mark entrypoint (must not be AT or TMP, which the CAS loop uses).
  const GpuRegister temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS64);
};
708
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierForHeapReferenceSlowPathMIPS64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCodeMIPS64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        GpuRegister index_reg = index_.AsRegister<GpuRegister>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips64::Mips64Assembler::Sll and
          // art::mips64::Mips64Assembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          GpuRegister free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathMIPS64";
  }

 private:
  // Returns a caller-save core register different from `ref_` and `obj_`
  // that is neither callee-save nor blocked; used to preserve `index_reg`
  // when it happens to be a callee-save register.
  GpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<GpuRegister>());
    size_t obj = static_cast<int>(obj_.AsRegister<GpuRegister>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<GpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // The location receiving the to-space reference produced by the barrier.
  const Location out_;
  // The location of the reference that was loaded and must be processed.
  const Location ref_;
  // The location of the object holding the reference.
  const Location obj_;
  // Static byte offset of the reference field within `obj_` (0 when `index_` is used).
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS64);
};
894
// Slow path generating a read barrier for a GC root (emitted for
// HLoadClass and HLoadString); calls the ReadBarrierForRootSlow
// runtime entrypoint and moves its result into `out_`.
class ReadBarrierForRootSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierForRootSlowPathMIPS64(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS64(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the GC root in the first runtime-call argument register.
    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 root_,
                                 DataType::Type::kReference);
    mips64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    // Copy the marked root from the runtime's return location into `out_`.
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS64"; }

 private:
  // The location receiving the marked root.
  const Location out_;
  // The location of the GC root to be processed.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS64);
};
940
// Constructs the MIPS64 code generator: registers the callee-save masks
// with the base CodeGenerator and sets up the per-graph arena-backed
// literal/patch tables used for dex-cache and boot-image relocation.
CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const Mips64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator(), &isa_features),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
978
979#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100980// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
981#define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700982#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700983
// Finalizes code generation: resolves assembler branches, then rewrites
// every recorded native pc (stack maps and disassembly intervals) through
// GetAdjustedPosition(), since branch fix-up may have grown the code.
void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  StackMapStream* stack_map_stream = GetStackMapStream();
  for (size_t i = 0, num = stack_map_stream->GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position = stack_map_stream->GetStackMapNativePcOffset(i);
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    // Positions can only move forward (code may only have grown).
    DCHECK_GE(new_position, old_position);
    stack_map_stream->SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  CodeGenerator::Finalize(allocator);
}
1014
1015Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
1016 return codegen_->GetAssembler();
1017}
1018
1019void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001020 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001021 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
1022}
1023
1024void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
Vladimir Marko225b6462015-09-28 12:17:40 +01001025 MoveOperands* move = moves_[index];
Alexey Frunze4dda3372015-06-01 18:31:49 -07001026 codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
1027}
1028
1029void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
1030 // Pop reg
1031 __ Ld(GpuRegister(reg), SP, 0);
Lazar Trsicd9672662015-09-03 17:33:01 +02001032 __ DecreaseFrameSize(kMips64DoublewordSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001033}
1034
1035void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
1036 // Push reg
Lazar Trsicd9672662015-09-03 17:33:01 +02001037 __ IncreaseFrameSize(kMips64DoublewordSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001038 __ Sd(GpuRegister(reg), SP, 0);
1039}
1040
// Swaps the contents of two SP-relative stack slots (`index1` and `index2`),
// each holding a 32-bit value (or 64-bit when `double_slot` is set), using
// TMP plus one additional scratch register.
void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
  LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
  StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
  __ LoadFromOffset(load_type,
                    GpuRegister(ensure_scratch.GetRegister()),
                    SP,
                    index1 + stack_offset);
  __ LoadFromOffset(load_type,
                    TMP,
                    SP,
                    index2 + stack_offset);
  __ StoreToOffset(store_type,
                   GpuRegister(ensure_scratch.GetRegister()),
                   SP,
                   index2 + stack_offset);
  __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
}
1064
// Swaps two quadword (128-bit) stack slots using the two FPU temporaries;
// both loads are emitted before the stores so overlapping slots are safe.
void ParallelMoveResolverMIPS64::ExchangeQuadSlots(int index1, int index2) {
  __ LoadFpuFromOffset(kLoadQuadword, FTMP, SP, index1);
  __ LoadFpuFromOffset(kLoadQuadword, FTMP2, SP, index2);
  __ StoreFpuToOffset(kStoreQuadword, FTMP, SP, index2);
  __ StoreFpuToOffset(kStoreQuadword, FTMP2, SP, index1);
}
1071
Alexey Frunze4dda3372015-06-01 18:31:49 -07001072static dwarf::Reg DWARFReg(GpuRegister reg) {
1073 return dwarf::Reg::Mips64Core(static_cast<int>(reg));
1074}
1075
David Srbeckyba702002016-02-01 18:15:29 +00001076static dwarf::Reg DWARFReg(FpuRegister reg) {
1077 return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
1078}
Alexey Frunze4dda3372015-06-01 18:31:49 -07001079
// Emits the method prologue: optional hotness-counter bump, stack-overflow
// probe, frame allocation, callee-save spills (with CFI records), the
// current-method store, and initialization of the should-deoptimize flag.
void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    // Increment the 16-bit hotness counter stored in the ArtMethod.
    __ Lhu(TMP, kMethodRegisterArgument, ArtMethod::HotnessCountOffset().Int32Value());
    __ Addiu(TMP, TMP, 1);
    __ Sh(TMP, kMethodRegisterArgument, ArtMethod::HotnessCountOffset().Int32Value());
  }

  bool do_overflow_check =
      FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kMips64) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe below SP; faults here are turned into StackOverflowError.
    __ LoadFromOffset(
        kLoadWord,
        ZERO,
        SP,
        -static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(InstructionSet::kMips64)) {
    LOG(FATAL) << "Stack frame larger than "
        << GetStackOverflowReservedBytes(InstructionSet::kMips64) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreFpuToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1146
// Emits the method epilogue: restores callee-saved registers (with CFI
// records), releases the frame, and returns through RA.
void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFpuFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    __ DecreaseFrameSize(GetFrameSize());
  }

  // Return: jump to the address held in RA.
  __ Jic(RA, 0);

  // The epilogue may be emitted mid-method; restore the CFI state so the
  // following code is described relative to the still-allocated frame.
  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1181
1182void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
1183 __ Bind(GetLabelOf(block));
1184}
1185
1186void CodeGeneratorMIPS64::MoveLocation(Location destination,
1187 Location source,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001188 DataType::Type dst_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001189 if (source.Equals(destination)) {
1190 return;
1191 }
1192
1193 // A valid move can always be inferred from the destination and source
1194 // locations. When moving from and to a register, the argument type can be
1195 // used to generate 32bit instead of 64bit moves.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001196 bool unspecified_type = (dst_type == DataType::Type::kVoid);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001197 DCHECK_EQ(unspecified_type, false);
1198
1199 if (destination.IsRegister() || destination.IsFpuRegister()) {
1200 if (unspecified_type) {
1201 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1202 if (source.IsStackSlot() ||
1203 (src_cst != nullptr && (src_cst->IsIntConstant()
1204 || src_cst->IsFloatConstant()
1205 || src_cst->IsNullConstant()))) {
1206 // For stack slots and 32bit constants, a 64bit type is appropriate.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001207 dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001208 } else {
1209 // If the source is a double stack slot or a 64bit constant, a 64bit
1210 // type is appropriate. Else the source is a register, and since the
1211 // type has not been specified, we chose a 64bit type to force a 64bit
1212 // move.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001213 dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001214 }
1215 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001216 DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
1217 (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001218 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1219 // Move to GPR/FPR from stack
1220 LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001221 if (DataType::IsFloatingPointType(dst_type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001222 __ LoadFpuFromOffset(load_type,
1223 destination.AsFpuRegister<FpuRegister>(),
1224 SP,
1225 source.GetStackIndex());
1226 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001227 // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
Alexey Frunze4dda3372015-06-01 18:31:49 -07001228 __ LoadFromOffset(load_type,
1229 destination.AsRegister<GpuRegister>(),
1230 SP,
1231 source.GetStackIndex());
1232 }
Lena Djokicca8c2952017-05-29 11:31:46 +02001233 } else if (source.IsSIMDStackSlot()) {
1234 __ LoadFpuFromOffset(kLoadQuadword,
1235 destination.AsFpuRegister<FpuRegister>(),
1236 SP,
1237 source.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001238 } else if (source.IsConstant()) {
1239 // Move to GPR/FPR from constant
1240 GpuRegister gpr = AT;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001241 if (!DataType::IsFloatingPointType(dst_type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001242 gpr = destination.AsRegister<GpuRegister>();
1243 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001244 if (dst_type == DataType::Type::kInt32 || dst_type == DataType::Type::kFloat32) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001245 int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001246 if (DataType::IsFloatingPointType(dst_type) && value == 0) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001247 gpr = ZERO;
1248 } else {
1249 __ LoadConst32(gpr, value);
1250 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001251 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001252 int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001253 if (DataType::IsFloatingPointType(dst_type) && value == 0) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001254 gpr = ZERO;
1255 } else {
1256 __ LoadConst64(gpr, value);
1257 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001258 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001259 if (dst_type == DataType::Type::kFloat32) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001260 __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001261 } else if (dst_type == DataType::Type::kFloat64) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001262 __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
1263 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001264 } else if (source.IsRegister()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001265 if (destination.IsRegister()) {
1266 // Move to GPR from GPR
1267 __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
1268 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001269 DCHECK(destination.IsFpuRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001270 if (DataType::Is64BitType(dst_type)) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001271 __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
1272 } else {
1273 __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
1274 }
1275 }
1276 } else if (source.IsFpuRegister()) {
1277 if (destination.IsFpuRegister()) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001278 if (GetGraph()->HasSIMD()) {
1279 __ MoveV(VectorRegisterFrom(destination),
1280 VectorRegisterFrom(source));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001281 } else {
Lena Djokicca8c2952017-05-29 11:31:46 +02001282 // Move to FPR from FPR
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001283 if (dst_type == DataType::Type::kFloat32) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001284 __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1285 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001286 DCHECK_EQ(dst_type, DataType::Type::kFloat64);
Lena Djokicca8c2952017-05-29 11:31:46 +02001287 __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1288 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001289 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001290 } else {
1291 DCHECK(destination.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001292 if (DataType::Is64BitType(dst_type)) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001293 __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1294 } else {
1295 __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1296 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001297 }
1298 }
Lena Djokicca8c2952017-05-29 11:31:46 +02001299 } else if (destination.IsSIMDStackSlot()) {
1300 if (source.IsFpuRegister()) {
1301 __ StoreFpuToOffset(kStoreQuadword,
1302 source.AsFpuRegister<FpuRegister>(),
1303 SP,
1304 destination.GetStackIndex());
1305 } else {
1306 DCHECK(source.IsSIMDStackSlot());
1307 __ LoadFpuFromOffset(kLoadQuadword,
1308 FTMP,
1309 SP,
1310 source.GetStackIndex());
1311 __ StoreFpuToOffset(kStoreQuadword,
1312 FTMP,
1313 SP,
1314 destination.GetStackIndex());
1315 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001316 } else { // The destination is not a register. It must be a stack slot.
1317 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1318 if (source.IsRegister() || source.IsFpuRegister()) {
1319 if (unspecified_type) {
1320 if (source.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001321 dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001322 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001323 dst_type =
1324 destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001325 }
1326 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001327 DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
1328 (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001329 // Move to stack from GPR/FPR
1330 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
1331 if (source.IsRegister()) {
1332 __ StoreToOffset(store_type,
1333 source.AsRegister<GpuRegister>(),
1334 SP,
1335 destination.GetStackIndex());
1336 } else {
1337 __ StoreFpuToOffset(store_type,
1338 source.AsFpuRegister<FpuRegister>(),
1339 SP,
1340 destination.GetStackIndex());
1341 }
1342 } else if (source.IsConstant()) {
1343 // Move to stack from constant
1344 HConstant* src_cst = source.GetConstant();
1345 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001346 GpuRegister gpr = ZERO;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001347 if (destination.IsStackSlot()) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001348 int32_t value = GetInt32ValueOf(src_cst->AsConstant());
1349 if (value != 0) {
1350 gpr = TMP;
1351 __ LoadConst32(gpr, value);
1352 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001353 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001354 DCHECK(destination.IsDoubleStackSlot());
1355 int64_t value = GetInt64ValueOf(src_cst->AsConstant());
1356 if (value != 0) {
1357 gpr = TMP;
1358 __ LoadConst64(gpr, value);
1359 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001360 }
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001361 __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001362 } else {
1363 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
1364 DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
1365 // Move to stack from stack
1366 if (destination.IsStackSlot()) {
1367 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1368 __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
1369 } else {
1370 __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
1371 __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
1372 }
1373 }
1374 }
1375}
1376
// Swaps the contents of two locations (GPRs, FPRs, vector registers, stack
// slots or SIMD stack slots) using TMP/FTMP as scratch. `type` is only
// consulted for the scalar FPR<->FPR case to pick single vs. double moves.
// Constants cannot be swapped; unsupported combinations abort.
void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, DataType::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_simd1 = loc1.IsSIMDStackSlot();
  bool is_simd2 = loc2.IsSIMDStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs via TMP.
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs.
    if (GetGraph()->HasSIMD()) {
      // With SIMD enabled, swap the full vector registers so any vector
      // contents are preserved as well.
      __ MoveV(static_cast<VectorRegister>(FTMP), VectorRegisterFrom(loc1));
      __ MoveV(VectorRegisterFrom(loc1), VectorRegisterFrom(loc2));
      __ MoveV(VectorRegisterFrom(loc2), static_cast<VectorRegister>(FTMP));
    } else {
      FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
      FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
      if (type == DataType::Type::kFloat32) {
        __ MovS(FTMP, r1);
        __ MovS(r1, r2);
        __ MovS(r2, FTMP);
      } else {
        DCHECK_EQ(type, DataType::Type::kFloat64);
        __ MovD(FTMP, r1);
        __ MovD(r1, r2);
        __ MovD(r2, FTMP);
      }
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot: stack value goes through TMP, register
    // value is stored directly into the slot first.
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    // Stack slot <-> stack slot is delegated to the parallel move resolver.
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else if (is_simd1 && is_simd2) {
    move_resolver_.ExchangeQuadSlots(loc1.GetStackIndex(), loc2.GetStackIndex());
  } else if ((is_fp_reg1 && is_simd2) || (is_fp_reg2 && is_simd1)) {
    // Swap FPR and SIMD stack slot using FTMP for the 128-bit slot contents.
    Location fp_reg_loc = is_fp_reg1 ? loc1 : loc2;
    Location mem_loc = is_fp_reg1 ? loc2 : loc1;
    __ LoadFpuFromOffset(kLoadQuadword, FTMP, SP, mem_loc.GetStackIndex());
    __ StoreFpuToOffset(kStoreQuadword,
                        fp_reg_loc.AsFpuRegister<FpuRegister>(),
                        SP,
                        mem_loc.GetStackIndex());
    __ MoveV(VectorRegisterFrom(fp_reg_loc), static_cast<VectorRegister>(FTMP));
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}
1461
Calin Juravle175dc732015-08-25 15:42:32 +01001462void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
1463 DCHECK(location.IsRegister());
1464 __ LoadConst32(location.AsRegister<GpuRegister>(), value);
1465}
1466
Calin Juravlee460d1d2015-09-29 04:52:17 +01001467void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1468 if (location.IsRegister()) {
1469 locations->AddTemp(location);
1470 } else {
1471 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1472 }
1473}
1474
// Marks the GC card table card covering `object` as dirty, recording that a
// reference (`value`) was just stored into it. If `value_can_be_null`, a null
// store skips the marking entirely.
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    // Storing null cannot create a reference worth tracking; skip.
    __ Beqzc(value, &done);
  }
  // Load the thread-local card table base.
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  // Compute the address of the card for `object`: base + (object >> kCardShift).
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  // Dirty the card by storing the low byte of `card`. This reuses the card
  // table base register as the value to store — presumably the base is biased
  // so its low byte equals the dirty-card marker (see gc::accounting::CardTable).
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1495
// Converts every recorded PC-relative patch in `infos` into a linker patch of
// the kind produced by `Factory` and appends it to `linker_patches`. For a
// "low" half patch, the PC anchor is the location of its paired "high" half;
// a standalone patch anchors on itself.
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile* dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.label.IsBound());
    uint32_t literal_offset = __ GetLabelLocation(&info.label);
    // Resolve the PC anchor: the paired high-half patch if present, else this one.
    const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
    uint32_t pc_rel_offset = __ GetLabelLocation(&info_high.label);
    linker_patches->push_back(Factory(literal_offset, dex_file, pc_rel_offset, offset_or_index));
  }
}
1510
Vladimir Markob066d432018-01-03 13:14:37 +00001511linker::LinkerPatch DataBimgRelRoPatchAdapter(size_t literal_offset,
1512 const DexFile* target_dex_file,
1513 uint32_t pc_insn_offset,
1514 uint32_t boot_image_offset) {
1515 DCHECK(target_dex_file == nullptr); // Unused for DataBimgRelRoPatch(), should be null.
1516 return linker::LinkerPatch::DataBimgRelRoPatch(literal_offset, pc_insn_offset, boot_image_offset);
1517}
1518
// Collects all linker patches recorded during compilation into `linker_patches`.
// The total is pre-computed so the vector is reserved exactly once and the
// final DCHECK can verify nothing was missed.
void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  size_t size =
      boot_image_method_patches_.size() +
      method_bss_entry_patches_.size() +
      boot_image_type_patches_.size() +
      type_bss_entry_patches_.size() +
      boot_image_string_patches_.size() +
      string_bss_entry_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        boot_image_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        boot_image_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        boot_image_string_patches_, linker_patches);
  } else {
    // Outside the boot image, boot_image_method_patches_ actually holds
    // .data.bimg.rel.ro entries (see NewBootImageRelRoPatch), and no boot
    // image type/string patches may have been created.
    EmitPcRelativeLinkerPatches<DataBimgRelRoPatchAdapter>(
        boot_image_method_patches_, linker_patches);
    DCHECK(boot_image_type_patches_.empty());
    DCHECK(boot_image_string_patches_.empty());
  }
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1550
Vladimir Markob066d432018-01-03 13:14:37 +00001551CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageRelRoPatch(
1552 uint32_t boot_image_offset,
1553 const PcRelativePatchInfo* info_high) {
1554 return NewPcRelativePatch(
1555 /* dex_file */ nullptr, boot_image_offset, info_high, &boot_image_method_patches_);
1556}
1557
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001558CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001559 MethodReference target_method,
1560 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001561 return NewPcRelativePatch(
1562 target_method.dex_file, target_method.index, info_high, &boot_image_method_patches_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001563}
1564
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001565CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001566 MethodReference target_method,
1567 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001568 return NewPcRelativePatch(
1569 target_method.dex_file, target_method.index, info_high, &method_bss_entry_patches_);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001570}
1571
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001572CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001573 const DexFile& dex_file,
1574 dex::TypeIndex type_index,
1575 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001576 return NewPcRelativePatch(&dex_file, type_index.index_, info_high, &boot_image_type_patches_);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001577}
1578
Vladimir Marko1998cd02017-01-13 13:02:58 +00001579CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001580 const DexFile& dex_file,
1581 dex::TypeIndex type_index,
1582 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001583 return NewPcRelativePatch(&dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001584}
1585
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001586CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001587 const DexFile& dex_file,
1588 dex::StringIndex string_index,
1589 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001590 return NewPcRelativePatch(
1591 &dex_file, string_index.index_, info_high, &boot_image_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001592}
1593
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001594CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewStringBssEntryPatch(
1595 const DexFile& dex_file,
1596 dex::StringIndex string_index,
1597 const PcRelativePatchInfo* info_high) {
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001598 return NewPcRelativePatch(&dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001599}
1600
Alexey Frunze19f6c692016-11-30 19:19:55 -08001601CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
Vladimir Marko59eb30f2018-02-20 11:52:34 +00001602 const DexFile* dex_file,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001603 uint32_t offset_or_index,
1604 const PcRelativePatchInfo* info_high,
1605 ArenaDeque<PcRelativePatchInfo>* patches) {
1606 patches->emplace_back(dex_file, offset_or_index, info_high);
Alexey Frunze19f6c692016-11-30 19:19:55 -08001607 return &patches->back();
1608}
1609
Alexey Frunzef63f5692016-12-13 17:43:11 -08001610Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1611 return map->GetOrCreate(
1612 value,
1613 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1614}
1615
Alexey Frunze19f6c692016-11-30 19:19:55 -08001616Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
1617 return uint64_literals_.GetOrCreate(
1618 value,
1619 [this, value]() { return __ NewLiteral<uint64_t>(value); });
1620}
1621
Alexey Frunzef63f5692016-12-13 17:43:11 -08001622Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001623 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunzef63f5692016-12-13 17:43:11 -08001624}
1625
// Emits the high half of a two-instruction PC-relative address computation:
// binds `info_high`'s label to an AUIPC whose immediate (placeholder 0x1234)
// the linker later overwrites. If `info_low` is given, its label is bound to
// the position of the *next* instruction, which the caller must emit as the
// low half (e.g. ld, jialc, daddiu).
void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                               GpuRegister out,
                                                               PcRelativePatchInfo* info_low) {
  // `info_high` must itself be a high-half record, not chained to another one.
  DCHECK(!info_high->patch_info_high);
  __ Bind(&info_high->label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
  if (info_low != nullptr) {
    DCHECK_EQ(info_low->patch_info_high, info_high);
    __ Bind(&info_low->label);
  }
}
1640
Alexey Frunze627c1a02017-01-30 19:28:14 -08001641Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
1642 dex::StringIndex string_index,
1643 Handle<mirror::String> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001644 ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001645 return jit_string_patches_.GetOrCreate(
1646 StringReference(&dex_file, string_index),
1647 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
1648}
1649
1650Literal* CodeGeneratorMIPS64::DeduplicateJitClassLiteral(const DexFile& dex_file,
1651 dex::TypeIndex type_index,
1652 Handle<mirror::Class> handle) {
Vladimir Marko174b2e22017-10-12 13:34:49 +01001653 ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001654 return jit_class_patches_.GetOrCreate(
1655 TypeReference(&dex_file, type_index),
1656 [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
1657}
1658
1659void CodeGeneratorMIPS64::PatchJitRootUse(uint8_t* code,
1660 const uint8_t* roots_data,
1661 const Literal* literal,
1662 uint64_t index_in_table) const {
1663 uint32_t literal_offset = GetAssembler().GetLabelLocation(literal->GetLabel());
1664 uintptr_t address =
1665 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
1666 reinterpret_cast<uint32_t*>(code + literal_offset)[0] = dchecked_integral_cast<uint32_t>(address);
1667}
1668
1669void CodeGeneratorMIPS64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1670 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001671 const StringReference& string_reference = entry.first;
1672 Literal* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01001673 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001674 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001675 }
1676 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001677 const TypeReference& type_reference = entry.first;
1678 Literal* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01001679 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001680 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001681 }
1682}
1683
// Marks the registers the register allocator must never hand out: ABI-reserved
// registers, codegen scratch registers, and the thread/suspend registers.
void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
  // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
  blocked_core_registers_[ZERO] = true;
  blocked_core_registers_[K0] = true;
  blocked_core_registers_[K1] = true;
  blocked_core_registers_[GP] = true;
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[RA] = true;

  // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
  // registers (similar to how AT is used by MIPS assemblers).
  blocked_core_registers_[AT] = true;
  blocked_core_registers_[TMP] = true;
  blocked_core_registers_[TMP2] = true;
  blocked_fpu_registers_[FTMP] = true;

  if (GetInstructionSetFeatures().HasMsa()) {
    // To be used just for MSA instructions.
    blocked_fpu_registers_[FTMP2] = true;
  }

  // Reserve suspend and thread registers.
  blocked_core_registers_[S0] = true;
  blocked_core_registers_[TR] = true;

  // Reserve T9 for function calls.
  blocked_core_registers_[T9] = true;

  if (GetGraph()->IsDebuggable()) {
    // Stubs do not save callee-save floating point registers. If the graph
    // is debuggable, we need to deal with these registers differently. For
    // now, just block them.
    for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
      blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
    }
  }
}
1721
Alexey Frunze4dda3372015-06-01 18:31:49 -07001722size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1723 __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001724 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001725}
1726
1727size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1728 __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
Lazar Trsicd9672662015-09-03 17:33:01 +02001729 return kMips64DoublewordSize;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001730}
1731
1732size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001733 __ StoreFpuToOffset(GetGraph()->HasSIMD() ? kStoreQuadword : kStoreDoubleword,
1734 FpuRegister(reg_id),
1735 SP,
1736 stack_index);
1737 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001738}
1739
1740size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02001741 __ LoadFpuFromOffset(GetGraph()->HasSIMD() ? kLoadQuadword : kLoadDoubleword,
1742 FpuRegister(reg_id),
1743 SP,
1744 stack_index);
1745 return GetFloatingPointSpillSlotSize();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001746}
1747
1748void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +01001749 stream << GpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001750}
1751
1752void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
David Brazdil9f0dece2015-09-21 18:20:26 +01001753 stream << FpuRegister(reg);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001754}
1755
// Calls the quick runtime entry point `entrypoint` on the current thread.
// If the entry point requires a stack map, PC info is recorded for
// `instruction` at `dex_pc` (with `slow_path` as context).
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1766
Alexey Frunze15958152017-02-09 19:08:30 -08001767void CodeGeneratorMIPS64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1768 HInstruction* instruction,
1769 SlowPathCode* slow_path) {
1770 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
1771 GenerateInvokeRuntime(entry_point_offset);
1772}
1773
// Emits the actual runtime call: loads the entry point address from the
// Thread object (TR) into T9 and jumps through it. The Nop fills the JALR
// branch delay slot.
void CodeGeneratorMIPS64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
  __ Jalr(T9);
  __ Nop();
}
1779
// Branches to `slow_path` if the class in `class_reg` is not yet initialized.
// Falls through (after a memory barrier) when the class status is at least
// kInitialized.
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  // The class status bits live above the SubtypeCheckBits bitstring in the
  // status_ word; compute the byte that holds them and the in-byte threshold.
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  constexpr uint32_t shifted_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);

  // TMP = 1 iff status < kInitialized, in which case take the slow path.
  __ LoadFromOffset(kLoadUnsignedByte, TMP, class_reg, status_byte_offset);
  __ Sltiu(TMP, TMP, shifted_initialized_value);
  __ Bnezc(TMP, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1795
// Compares the SubtypeCheck bitstring of the class held in `temp` against the
// path-to-root encoded in `check`. On exit, `temp` == 0 iff the bitstring
// check succeeds; the caller branches on that result.
void InstructionCodeGeneratorMIPS64::GenerateBitstringTypeCheckCompare(HTypeCheckInstruction* check,
                                                                       GpuRegister temp) {
  uint32_t path_to_root = check->GetBitstringPathToRoot();
  uint32_t mask = check->GetBitstringMask();
  // The mask must be contiguous low bits: 2^mask_bits - 1.
  DCHECK(IsPowerOfTwo(mask + 1));
  size_t mask_bits = WhichPowerOf2(mask + 1);

  if (mask_bits == 16u) {
    // Load only the bitstring part of the status word.
    __ LoadFromOffset(
        kLoadUnsignedHalfword, temp, temp, mirror::Class::StatusOffset().Int32Value());
    // Compare the bitstring bits using XOR: temp becomes 0 iff they match.
    __ Xori(temp, temp, dchecked_integral_cast<uint16_t>(path_to_root));
  } else {
    // /* uint32_t */ temp = temp->status_
    __ LoadFromOffset(kLoadWord, temp, temp, mirror::Class::StatusOffset().Int32Value());
    // Compare the bitstring bits using XOR. Xori takes a 16-bit immediate,
    // so larger paths go through TMP.
    if (IsUint<16>(path_to_root)) {
      __ Xori(temp, temp, dchecked_integral_cast<uint16_t>(path_to_root));
    } else {
      __ LoadConst32(TMP, path_to_root);
      __ Xor(temp, temp, TMP);
    }
    // Shift out bits that do not contribute to the comparison.
    __ Sll(temp, temp, 32 - mask_bits);
  }
}
1823
// Emits a memory barrier. The requested `kind` is ignored: SYNC with stype 0
// is emitted for every barrier kind, which conservatively satisfies all of
// them.
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}
1827
// Emits a suspend check: tests the thread flags halfword in the Thread object
// and enters the suspend-check slow path if any flag is set. The slow path is
// created lazily on first use and cached on the HSuspendCheck instruction.
// With a `successor` (back edge), the fast path branches straight to the
// successor block; without one, execution falls through past the check.
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      down_cast<SuspendCheckSlowPathMIPS64*>(instruction->GetSlowPath());

  if (slow_path == nullptr) {
    // First time this check is emitted: build and register the slow path.
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathMIPS64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    // Reused slow path must target the same successor.
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    // Fall-through case: branch out only when a flag is set, then continue.
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge case: no flags -> jump to successor, otherwise slow path.
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1858
// Caches the assembler and code generator pointers used throughout the
// Visit* methods of this instruction code generator.
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1864
1865void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
1866 DCHECK_EQ(instruction->InputCount(), 2U);
Vladimir Markoca6fff82017-10-03 14:49:14 +01001867 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001868 DataType::Type type = instruction->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07001869 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001870 case DataType::Type::kInt32:
1871 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001872 locations->SetInAt(0, Location::RequiresRegister());
1873 HInstruction* right = instruction->InputAt(1);
1874 bool can_use_imm = false;
1875 if (right->IsConstant()) {
1876 int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
1877 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
1878 can_use_imm = IsUint<16>(imm);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001879 } else {
Lena Djokic38530172017-11-16 11:11:50 +01001880 DCHECK(instruction->IsAdd() || instruction->IsSub());
1881 bool single_use = right->GetUses().HasExactlyOneElement();
1882 if (instruction->IsSub()) {
1883 if (!(type == DataType::Type::kInt32 && imm == INT32_MIN)) {
1884 imm = -imm;
1885 }
1886 }
1887 if (type == DataType::Type::kInt32) {
1888 can_use_imm = IsInt<16>(imm) || (Low16Bits(imm) == 0) || single_use;
1889 } else {
1890 can_use_imm = IsInt<16>(imm) || (IsInt<32>(imm) && (Low16Bits(imm) == 0)) || single_use;
1891 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001892 }
1893 }
1894 if (can_use_imm)
1895 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
1896 else
1897 locations->SetInAt(1, Location::RequiresRegister());
1898 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1899 }
1900 break;
1901
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001902 case DataType::Type::kFloat32:
1903 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07001904 locations->SetInAt(0, Location::RequiresFpuRegister());
1905 locations->SetInAt(1, Location::RequiresFpuRegister());
1906 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1907 break;
1908
1909 default:
1910 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
1911 }
1912}
1913
// Emits code for And/Or/Xor/Add/Sub. Integer constants pre-approved by the
// locations builder are folded into immediate forms; 32-bit and 64-bit
// constants too wide for a single instruction are materialized piecewise with
// AUI/DAUI/DAHI/DATI sequences applied directly to the destination.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd() || instruction->IsSub()) {
        // Subtraction of a constant becomes addition of its negation.
        if (instruction->IsSub()) {
          rhs_imm = -rhs_imm;
        }
        if (type == DataType::Type::kInt32) {
          if (use_imm) {
            if (IsInt<16>(rhs_imm)) {
              __ Addiu(dst, lhs, rhs_imm);
            } else {
              // Split the 32-bit constant into halves: AUI adds the upper
              // half, ADDIU the (sign-extended) lower half. The +1 carry
              // compensates for ADDIU sign-extending a negative low half.
              int16_t rhs_imm_high = High16Bits(rhs_imm);
              int16_t rhs_imm_low = Low16Bits(rhs_imm);
              if (rhs_imm_low < 0) {
                rhs_imm_high += 1;
              }
              __ Aui(dst, lhs, rhs_imm_high);
              if (rhs_imm_low != 0) {
                __ Addiu(dst, dst, rhs_imm_low);
              }
            }
          } else {
            if (instruction->IsAdd()) {
              __ Addu(dst, lhs, rhs_reg);
            } else {
              DCHECK(instruction->IsSub());
              __ Subu(dst, lhs, rhs_reg);
            }
          }
        } else {
          if (use_imm) {
            if (IsInt<16>(rhs_imm)) {
              __ Daddiu(dst, lhs, rhs_imm);
            } else if (IsInt<32>(rhs_imm)) {
              // 32-bit constant: DAUI upper half + DADDIU lower half, with a
              // DAHI fix-up when the upper-half carry wraps to -32768.
              int16_t rhs_imm_high = High16Bits(rhs_imm);
              int16_t rhs_imm_low = Low16Bits(rhs_imm);
              bool overflow_hi16 = false;
              if (rhs_imm_low < 0) {
                rhs_imm_high += 1;
                overflow_hi16 = (rhs_imm_high == -32768);
              }
              __ Daui(dst, lhs, rhs_imm_high);
              if (rhs_imm_low != 0) {
                __ Daddiu(dst, dst, rhs_imm_low);
              }
              if (overflow_hi16) {
                __ Dahi(dst, 1);
              }
            } else {
              // Full 64-bit constant: add in four 16-bit pieces (DADDIU,
              // DAUI, DAHI, DATI). Each piece is sign-extended by its
              // instruction, so carries are propagated into the higher
              // pieces up front by adjusting rhs_imm.
              int16_t rhs_imm_low = Low16Bits(Low32Bits(rhs_imm));
              if (rhs_imm_low < 0) {
                rhs_imm += (INT64_C(1) << 16);
              }
              int16_t rhs_imm_upper = High16Bits(Low32Bits(rhs_imm));
              if (rhs_imm_upper < 0) {
                rhs_imm += (INT64_C(1) << 32);
              }
              int16_t rhs_imm_high = Low16Bits(High32Bits(rhs_imm));
              if (rhs_imm_high < 0) {
                rhs_imm += (INT64_C(1) << 48);
              }
              int16_t rhs_imm_top = High16Bits(High32Bits(rhs_imm));
              GpuRegister tmp = lhs;
              if (rhs_imm_low != 0) {
                __ Daddiu(dst, tmp, rhs_imm_low);
                tmp = dst;
              }
              // Dahi and Dati must use the same input and output register, so we have to initialize
              // the dst register using Daddiu or Daui, even when the intermediate value is zero:
              // Daui(dst, lhs, 0).
              if ((rhs_imm_upper != 0) || (rhs_imm_low == 0)) {
                __ Daui(dst, tmp, rhs_imm_upper);
              }
              if (rhs_imm_high != 0) {
                __ Dahi(dst, rhs_imm_high);
              }
              if (rhs_imm_top != 0) {
                __ Dati(dst, rhs_imm_top);
              }
            }
          } else if (instruction->IsAdd()) {
            __ Daddu(dst, lhs, rhs_reg);
          } else {
            DCHECK(instruction->IsSub());
            __ Dsubu(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == DataType::Type::kFloat32)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == DataType::Type::kFloat32)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
2061
2062void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002063 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002064
Vladimir Markoca6fff82017-10-03 14:49:14 +01002065 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002066 DataType::Type type = instr->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002067 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002068 case DataType::Type::kInt32:
2069 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002070 locations->SetInAt(0, Location::RequiresRegister());
2071 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002072 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002073 break;
2074 }
2075 default:
2076 LOG(FATAL) << "Unexpected shift type " << type;
2077 }
2078}
2079
// Emits code for Shl/Shr/UShr/Ror. Picks among the 32-bit, 64-bit (<32),
// 64-bit (>=32, the *32 opcodes) and variable-amount instruction forms based
// on the operand type and whether the shift amount is a constant.
void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  DataType::Type type = instr->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        // Per Java semantics, only the low 5 (int) or 6 (long) bits of the
        // shift amount are significant.
        uint32_t shift_value = rhs_imm &
            (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          // Shift by zero is a plain move (elided if dst == lhs).
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            // Amounts of 32..63 use the *32 opcode variants, which encode
            // the amount minus 32.
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        // Variable shift amount: the *v instruction forms take it from a
        // register.
        if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2172
// Add shares its location setup with the other binary operations.
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2176
// Add shares its code emission with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2180
// And shares its location setup with the other binary operations.
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2184
// And shares its code emission with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2188
// Sets up locations for an array load. Object-array loads with read barriers
// may call a slow path and need output overlap (and possibly a temp) so the
// array reference stays live for the barrier.
void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
  DataType::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    // With the Baker read-barrier thunks enabled, the thunk supplies the
    // scratch register, so no temp is required.
    bool temp_needed = instruction->GetIndex()->IsConstant()
        ? !kBakerReadBarrierThunksEnableForFields
        : !kBakerReadBarrierThunksEnableForArrays;
    if (temp_needed) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
2225
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002226static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS64* codegen) {
2227 auto null_checker = [codegen, instruction]() {
2228 codegen->MaybeRecordImplicitNullCheck(instruction);
2229 };
2230 return null_checker;
2231}
2232
// Emits an array element load. Selects the load width/signedness per element
// type, folds constant indices into the address offset, handles compressed
// strings for char loads, and routes reference loads through the read-barrier
// machinery when enabled.
void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  Location index = locations->InAt(1);
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  DataType::Type type = instruction->GetType();
  // String.charAt() on a compressed string may load a byte instead of a
  // halfword; the compression flag is tested at runtime below.
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt8: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint16: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (maybe_compressed_char_at) {
        // TMP = compression flag (bit 0 of the count field):
        // 0 = compressed (byte chars), 1 = uncompressed (halfword chars).
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
        __ Dext(TMP, TMP, 0, 1);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
      }
      if (index.IsConstant()) {
        int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
        if (maybe_compressed_char_at) {
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedByte,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_1));
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2));
          __ Bind(&done);
        } else {
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2),
                            null_checker);
        }
      } else {
        GpuRegister index_reg = index.AsRegister<GpuRegister>();
        if (maybe_compressed_char_at) {
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ Daddu(TMP, obj, index_reg);
          __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
          __ Bind(&done);
        } else {
          // TMP = obj + (index << 1), then load at data_offset.
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
        }
      }
      break;
    }

    case DataType::Type::kInt16: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_2);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt32: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      LoadOperandType load_type =
          (type == DataType::Type::kReference) ? kLoadUnsignedWord : kLoadWord;
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(load_type, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFromOffset(load_type, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kReference: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        bool temp_needed = index.IsConstant()
            ? !kBakerReadBarrierThunksEnableForFields
            : !kBakerReadBarrierThunksEnableForArrays;
        Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier call.
        DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
        if (index.IsConstant()) {
          // Array load with a constant index can be treated as a field load.
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          offset,
                                                          temp,
                                                          /* needs_null_check */ false);
        } else {
          codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          data_offset,
                                                          index,
                                                          temp,
                                                          /* needs_null_check */ false);
        }
      } else {
        GpuRegister out = out_loc.AsRegister<GpuRegister>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ LoadFromOffset(kLoadUnsignedWord, out, obj, offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          __ LoadFromOffset(kLoadUnsignedWord, out, TMP, data_offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction,
                                                 out_loc,
                                                 out_loc,
                                                 obj_loc,
                                                 data_offset,
                                                 index);
        }
      }
      break;
    }

    case DataType::Type::kInt64: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFpuFromOffset(kLoadWord, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2460
// Array length: array reference in, length out, no overlap required.
void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2466
2467void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
2468 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01002469 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002470 GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
2471 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
2472 __ LoadFromOffset(kLoadWord, out, obj, offset);
2473 codegen_->MaybeRecordImplicitNullCheck(instruction);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002474 // Mask out compression flag from String's array length.
2475 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
2476 __ Srl(out, out, 1u);
2477 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07002478}
2479
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002480Location LocationsBuilderMIPS64::RegisterOrZeroConstant(HInstruction* instruction) {
2481 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2482 ? Location::ConstantLocation(instruction->AsConstant())
2483 : Location::RequiresRegister();
2484}
2485
2486Location LocationsBuilderMIPS64::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2487 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2488 // We can store a non-zero float or double constant without first loading it into the FPU,
2489 // but we should only prefer this if the constant has a single use.
2490 if (instruction->IsConstant() &&
2491 (instruction->AsConstant()->IsZeroBitPattern() ||
2492 instruction->GetUses().HasExactlyOneElement())) {
2493 return Location::ConstantLocation(instruction->AsConstant());
2494 // Otherwise fall through and require an FPU register for the constant.
2495 }
2496 return Location::RequiresFpuRegister();
2497}
2498
Alexey Frunze4dda3372015-06-01 18:31:49 -07002499void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002500 DataType::Type value_type = instruction->GetComponentType();
Alexey Frunze15958152017-02-09 19:08:30 -08002501
2502 bool needs_write_barrier =
2503 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2504 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2505
Vladimir Markoca6fff82017-10-03 14:49:14 +01002506 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Alexey Frunze4dda3372015-06-01 18:31:49 -07002507 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002508 may_need_runtime_call_for_type_check ?
2509 LocationSummary::kCallOnSlowPath :
2510 LocationSummary::kNoCall);
2511
2512 locations->SetInAt(0, Location::RequiresRegister());
2513 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002514 if (DataType::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
Alexey Frunze15958152017-02-09 19:08:30 -08002515 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002516 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002517 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2518 }
2519 if (needs_write_barrier) {
2520 // Temporary register for the write barrier.
2521 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07002522 }
2523}
2524
// Emits the store for an array element. For each component type the element
// address is either folded into the offset (constant index) or computed into
// TMP via Daddu/Dlsa (register index); the value is then stored either as an
// immediate (constant) or from a register. Reference stores additionally
// perform the covariance type check (with an ArraySet slow path when needed),
// heap-reference poisoning, and GC card marking.
void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  // With a constant index the base stays the array register and the index is
  // folded into the store's offset; otherwise TMP holds the element address.
  GpuRegister base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        __ Daddu(base_reg, obj, index.AsRegister<GpuRegister>());
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_2);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kReference: {
      if (value_location.IsConstant()) {
        // Just setting null. No type check or write barrier is required.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      GpuRegister value = value_location.AsRegister<GpuRegister>();
      GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
      GpuRegister temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      Mips64Label done;
      SlowPathCodeMIPS64* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Storing null always passes the type check; store it directly and
          // skip over the check below.
          Mips64Label non_zero;
          __ Bnezc(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ Bc(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers. This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          // The value's class being Object (i.e. the array component type's
          // superclass being null) also passes the check without a call.
          Mips64Label do_put;
          __ Beqc(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnezc(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bnec(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      GpuRegister source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        // On the fast-only path the store above is the first access that can
        // fault, so it serves as the implicit null check.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        // FP constants are stored via their integer bit pattern from a GPR.
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        // FP constants are stored via their integer bit pattern from a GPR.
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2765
2766void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002767 RegisterSet caller_saves = RegisterSet::Empty();
2768 InvokeRuntimeCallingConvention calling_convention;
2769 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2770 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2771 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicdbd43032017-11-15 16:31:56 +01002772
2773 HInstruction* index = instruction->InputAt(0);
2774 HInstruction* length = instruction->InputAt(1);
2775
2776 bool const_index = false;
2777 bool const_length = false;
2778
2779 if (index->IsConstant()) {
2780 if (length->IsConstant()) {
2781 const_index = true;
2782 const_length = true;
2783 } else {
2784 int32_t index_value = index->AsIntConstant()->GetValue();
2785 if (index_value < 0 || IsInt<16>(index_value + 1)) {
2786 const_index = true;
2787 }
2788 }
2789 } else if (length->IsConstant()) {
2790 int32_t length_value = length->AsIntConstant()->GetValue();
2791 if (IsUint<15>(length_value)) {
2792 const_length = true;
2793 }
2794 }
2795
2796 locations->SetInAt(0, const_index
2797 ? Location::ConstantLocation(index->AsConstant())
2798 : Location::RequiresRegister());
2799 locations->SetInAt(1, const_length
2800 ? Location::ConstantLocation(length->AsConstant())
2801 : Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002802}
2803
// Emits the array bounds check `0 <= index < length`, choosing the cheapest
// instruction sequence for each constant/register combination of the two
// operands; out-of-range values branch to a BoundsCheck slow path.
void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);

  if (length_loc.IsConstant()) {
    int32_t length = length_loc.GetConstant()->AsIntConstant()->GetValue();
    if (index_loc.IsConstant()) {
      // Both operands constant: the outcome is known at compile time, so
      // either emit an unconditional branch to the slow path or nothing.
      int32_t index = index_loc.GetConstant()->AsIntConstant()->GetValue();
      if (index < 0 || index >= length) {
        BoundsCheckSlowPathMIPS64* slow_path =
            new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        __ Bc(slow_path->GetEntryLabel());
      } else {
        // Nothing to be done.
      }
      return;
    }

    // Constant length, index in a register.
    BoundsCheckSlowPathMIPS64* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    GpuRegister index = index_loc.AsRegister<GpuRegister>();
    if (length == 0) {
      // Every index is out of bounds for an empty array.
      __ Bc(slow_path->GetEntryLabel());
    } else if (length == 1) {
      // Only index 0 is in bounds.
      __ Bnezc(index, slow_path->GetEntryLabel());
    } else {
      // Unsigned `index < length` also catches negative indices.
      DCHECK(IsUint<15>(length)) << length;
      __ Sltiu(TMP, index, length);
      __ Beqzc(TMP, slow_path->GetEntryLabel());
    }
  } else {
    GpuRegister length = length_loc.AsRegister<GpuRegister>();
    BoundsCheckSlowPathMIPS64* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    if (index_loc.IsConstant()) {
      // Constant index, length in a register.
      int32_t index = index_loc.GetConstant()->AsIntConstant()->GetValue();
      if (index < 0) {
        // A negative index is always out of bounds.
        __ Bc(slow_path->GetEntryLabel());
      } else if (index == 0) {
        // In bounds iff length > 0.
        __ Blezc(length, slow_path->GetEntryLabel());
      } else {
        // In bounds iff length > index, i.e. NOT (length < index + 1).
        DCHECK(IsInt<16>(index + 1)) << index;
        __ Sltiu(TMP, length, index + 1);
        __ Bnezc(TMP, slow_path->GetEntryLabel());
      }
    } else {
      // Both in registers: a single unsigned compare-and-branch suffices
      // (negative indices appear as large unsigned values).
      GpuRegister index = index_loc.AsRegister<GpuRegister>();
      __ Bgeuc(index, length, slow_path->GetEntryLabel());
    }
  }
}
2859
Alexey Frunze15958152017-02-09 19:08:30 -08002860// Temp is used for read barrier.
2861static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
2862 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002863 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08002864 (kUseBakerReadBarrier ||
2865 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
2866 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
2867 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
2868 return 1;
2869 }
2870 return 0;
2871}
2872
2873// Extra temp is used for read barrier.
2874static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
2875 return 1 + NumberOfInstanceOfTemps(type_check_kind);
2876}
2877
Alexey Frunze4dda3372015-06-01 18:31:49 -07002878void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002879 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzedfc30af2018-01-24 16:25:10 -08002880 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01002881 LocationSummary* locations =
2882 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002883 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00002884 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
2885 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
2886 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
2887 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
2888 } else {
2889 locations->SetInAt(1, Location::RequiresRegister());
2890 }
Alexey Frunze15958152017-02-09 19:08:30 -08002891 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002892}
2893
// Emits the checkcast sequence for `obj` against the target class/bitstring.
// Objects that fail the inline fast path branch to a TypeCheck slow path
// (fatal or throwing, per CodeGenerator::IsTypeCheckSlowPathFatal); a null
// `obj` always passes. All class loads on the fast path are performed
// without read barriers (see the per-case comments).
void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  // For kBitstringCheck `cls` holds constants; otherwise it is a register
  // with the target class reference.
  Location cls = locations->InAt(1);
  Location temp_loc = locations->GetTemp(0);
  GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_LE(num_temps, 2u);
  // Second temp (if present) is only used for read barrier support.
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
  Mips64Label done;

  bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(slow_path);

  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ Bnec(temp, cls.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, compare the classes.
      __ Bnec(temp, cls.AsRegister<GpuRegister>(), &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqc(temp, cls.AsRegister<GpuRegister>(), &done);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception. Otherwise, jump to the beginning of the loop.
      __ Bnezc(temp, &loop);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Do an exact check.
      __ Beqc(temp, cls.AsRegister<GpuRegister>(), &done);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the component type is null, jump to the slow path to throw the exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array, further check that this component
      // type is not a primitive type.
      __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Bnezc(temp, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check case.
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ Bc(slow_path->GetEntryLabel());
      break;

    case TypeCheckKind::kInterfaceCheck: {
      // Avoid read barriers to improve performance of the fast path. We can not get false
      // positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Lw(TMP, temp, array_length_offset);
      // Loop through the iftable and check if any class matches.
      Mips64Label loop;
      __ Bind(&loop);
      // TMP holds the remaining entry count; exhausted means no match.
      __ Beqzc(TMP, slow_path->GetEntryLabel());
      __ Lwu(AT, temp, object_array_data_offset);
      __ MaybeUnpoisonHeapReference(AT);
      // Go to next interface.
      __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
      __ Addiu(TMP, TMP, -2);
      // Compare the classes and continue the loop if they do not match.
      __ Bnec(AT, cls.AsRegister<GpuRegister>(), &loop);
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);

      // Compare the type-check bitstring; a non-zero result means mismatch.
      GenerateBitstringTypeCheckCompare(instruction, temp);
      __ Bnezc(temp, slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);
  __ Bind(slow_path->GetExitLabel());
}
3083
3084void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
3085 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003086 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003087 locations->SetInAt(0, Location::RequiresRegister());
3088 if (check->HasUses()) {
3089 locations->SetOut(Location::SameAsFirstInput());
3090 }
3091}
3092
3093void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
3094 // We assume the class is not null.
Vladimir Marko174b2e22017-10-12 13:34:49 +01003095 SlowPathCodeMIPS64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(
Alexey Frunze4dda3372015-06-01 18:31:49 -07003096 check->GetLoadClass(),
3097 check,
3098 check->GetDexPc(),
3099 true);
3100 codegen_->AddSlowPath(slow_path);
3101 GenerateClassInitializationCheck(slow_path,
3102 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
3103}
3104
3105void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003106 DataType::Type in_type = compare->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003107
Vladimir Markoca6fff82017-10-03 14:49:14 +01003108 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(compare);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003109
3110 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003111 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003112 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003113 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003114 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003115 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003116 case DataType::Type::kInt32:
3117 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003118 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07003119 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003120 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3121 break;
3122
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003123 case DataType::Type::kFloat32:
3124 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003125 locations->SetInAt(0, Location::RequiresFpuRegister());
3126 locations->SetInAt(1, Location::RequiresFpuRegister());
3127 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003128 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003129
3130 default:
3131 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3132 }
3133}
3134
// Emits code for HCompare, producing -1/0/1 in a core register.
// For floating point, IsGtBias() selects the result produced when the
// comparison is unordered (a NaN operand): 1 with gt bias, -1 otherwise.
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  DataType::Type in_type = instruction->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      GpuRegister rhs = ZERO;
      if (use_imm) {
        // Materialize a non-zero constant RHS into AT; a zero constant can
        // use the hard-wired ZERO register directly.
        if (in_type == DataType::Type::kInt64) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // res = (rhs < lhs) - (lhs < rhs): yields -1/0/1 without overflow.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case DataType::Type::kFloat32: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Equal (ordered) => 0.
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // lhs < rhs => -1; otherwise (including unordered) => 1.
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // rhs < lhs => 1; otherwise (including unordered) => -1.
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case DataType::Type::kFloat64: {
      // Same scheme as kFloat32, using the double-precision compares.
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3226
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003227void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003228 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze299a9392015-12-08 16:08:02 -08003229 switch (instruction->InputAt(0)->GetType()) {
3230 default:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003231 case DataType::Type::kInt64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003232 locations->SetInAt(0, Location::RequiresRegister());
3233 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3234 break;
3235
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003236 case DataType::Type::kFloat32:
3237 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003238 locations->SetInAt(0, Location::RequiresFpuRegister());
3239 locations->SetInAt(1, Location::RequiresFpuRegister());
3240 break;
3241 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003242 if (!instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003243 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3244 }
3245}
3246
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003247void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003248 if (instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003249 return;
3250 }
3251
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003252 DataType::Type type = instruction->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003253 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08003254 switch (type) {
3255 default:
3256 // Integer case.
3257 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
3258 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003259 case DataType::Type::kInt64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003260 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
3261 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003262 case DataType::Type::kFloat32:
3263 case DataType::Type::kFloat64:
Tijana Jakovljevic43758192016-12-30 09:23:01 +01003264 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3265 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003266 }
3267}
3268
Alexey Frunzec857c742015-09-23 15:12:39 -07003269void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3270 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003271 DataType::Type type = instruction->GetResultType();
Alexey Frunzec857c742015-09-23 15:12:39 -07003272
3273 LocationSummary* locations = instruction->GetLocations();
3274 Location second = locations->InAt(1);
3275 DCHECK(second.IsConstant());
3276
3277 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3278 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3279 int64_t imm = Int64FromConstant(second.GetConstant());
3280 DCHECK(imm == 1 || imm == -1);
3281
3282 if (instruction->IsRem()) {
3283 __ Move(out, ZERO);
3284 } else {
3285 if (imm == -1) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003286 if (type == DataType::Type::kInt32) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003287 __ Subu(out, ZERO, dividend);
3288 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003289 DCHECK_EQ(type, DataType::Type::kInt64);
Alexey Frunzec857c742015-09-23 15:12:39 -07003290 __ Dsubu(out, ZERO, dividend);
3291 }
3292 } else if (out != dividend) {
3293 __ Move(out, dividend);
3294 }
3295 }
3296}
3297
// Emits code for a division or remainder by a power-of-two constant using
// shifts and masks instead of a hardware divide. Signed division by 2^k
// rounds toward zero, so a bias of (2^k - 1) is added to negative dividends
// before the arithmetic shift.
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  // AbsOrMin leaves INT_MIN/INT64_MIN unchanged; the cast makes it a valid
  // power of two for CTZ.
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);
      } else {
        // TMP = (dividend < 0) ? (2^ctz_imm - 1) : 0 — the rounding bias.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      if (imm < 0) {
        // Negative divisor: negate the quotient.
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        // Same bias computation as above, split across the 32-bit-shift
        // variants since MIPS64 shift amounts are encoded in 5 bits.
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        // out = ((dividend + bias) & (2^ctz_imm - 1)) - bias; the Ins of
        // ZERO clears the bits above ctz_imm, implementing the mask.
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        __ Ins(out, ZERO, ctz_imm, 32 - ctz_imm);
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        // 64-bit version of the masked-remainder sequence above.
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        __ DblIns(out, ZERO, ctz_imm, 64 - ctz_imm);
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
3386
// Emits code for a division or remainder by an arbitrary non-zero,
// non-power-of-two constant, using the classic magic-number multiplication
// (multiply by a precomputed reciprocal, keep the high word, correct, shift).
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == DataType::Type::kInt64),
                                  &magic,
                                  &shift);

  if (type == DataType::Type::kInt32) {
    // TMP = high 32 bits of (dividend * magic).
    __ LoadConst32(TMP, magic);
    __ MuhR6(TMP, dividend, TMP);

    // Correct for the sign mismatch between the divisor and the magic value.
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // Round toward zero: add 1 when the intermediate result is negative
      // (out = TMP - sign_bit(TMP)).
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // Remainder: out = dividend - quotient * imm.
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    // 64-bit variant of the same sequence.
    __ LoadConst64(TMP, magic);
    __ Dmuh(TMP, dividend, TMP);

    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    // Shift amounts >= 32 need the 32-bit-offset shift encoding.
    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
3460
3461void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3462 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003463 DataType::Type type = instruction->GetResultType();
3464 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Alexey Frunzec857c742015-09-23 15:12:39 -07003465
3466 LocationSummary* locations = instruction->GetLocations();
3467 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3468 Location second = locations->InAt(1);
3469
3470 if (second.IsConstant()) {
3471 int64_t imm = Int64FromConstant(second.GetConstant());
3472 if (imm == 0) {
3473 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3474 } else if (imm == 1 || imm == -1) {
3475 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003476 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003477 DivRemByPowerOfTwo(instruction);
3478 } else {
3479 DCHECK(imm <= -2 || imm >= 2);
3480 GenerateDivRemWithAnyConstant(instruction);
3481 }
3482 } else {
3483 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3484 GpuRegister divisor = second.AsRegister<GpuRegister>();
3485 if (instruction->IsDiv()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003486 if (type == DataType::Type::kInt32)
Alexey Frunzec857c742015-09-23 15:12:39 -07003487 __ DivR6(out, dividend, divisor);
3488 else
3489 __ Ddiv(out, dividend, divisor);
3490 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003491 if (type == DataType::Type::kInt32)
Alexey Frunzec857c742015-09-23 15:12:39 -07003492 __ ModR6(out, dividend, divisor);
3493 else
3494 __ Dmod(out, dividend, divisor);
3495 }
3496 }
3497}
3498
Alexey Frunze4dda3372015-06-01 18:31:49 -07003499void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
3500 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003501 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003502 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003503 case DataType::Type::kInt32:
3504 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003505 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07003506 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003507 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3508 break;
3509
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003510 case DataType::Type::kFloat32:
3511 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003512 locations->SetInAt(0, Location::RequiresFpuRegister());
3513 locations->SetInAt(1, Location::RequiresFpuRegister());
3514 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3515 break;
3516
3517 default:
3518 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3519 }
3520}
3521
3522void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003523 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003524 LocationSummary* locations = instruction->GetLocations();
3525
3526 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003527 case DataType::Type::kInt32:
3528 case DataType::Type::kInt64:
Alexey Frunzec857c742015-09-23 15:12:39 -07003529 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003530 break;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003531 case DataType::Type::kFloat32:
3532 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003533 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
3534 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
3535 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003536 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07003537 __ DivS(dst, lhs, rhs);
3538 else
3539 __ DivD(dst, lhs, rhs);
3540 break;
3541 }
3542 default:
3543 LOG(FATAL) << "Unexpected div type " << type;
3544 }
3545}
3546
3547void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003548 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003549 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003550}
3551
3552void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3553 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003554 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003555 codegen_->AddSlowPath(slow_path);
3556 Location value = instruction->GetLocations()->InAt(0);
3557
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003558 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003559
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003560 if (!DataType::IsIntegralType(type)) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003561 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003562 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003563 }
3564
3565 if (value.IsConstant()) {
3566 int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
3567 if (divisor == 0) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003568 __ Bc(slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003569 } else {
3570 // A division by a non-null constant is valid. We don't need to perform
3571 // any check, so simply fall through.
3572 }
3573 } else {
3574 __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
3575 }
3576}
3577
3578void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
3579 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003580 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003581 locations->SetOut(Location::ConstantLocation(constant));
3582}
3583
3584void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
3585 // Will be generated at use site.
3586}
3587
// The exit block has no inputs and produces no value.
void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
  // Nothing to emit for the exit block.
}
3594
3595void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
3596 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003597 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003598 locations->SetOut(Location::ConstantLocation(constant));
3599}
3600
3601void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
3602 // Will be generated at use site.
3603}
3604
// Emits code for an unconditional control-flow edge (HGoto/HTryBoundary):
// hotness counting and a suspend check on loop back edges, plus a branch
// unless the successor is the next block in emission order.
void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  if (successor->IsExitBlock()) {
    DCHECK(got->GetPrevious()->AlwaysThrows());
    return;  // no code needed
  }

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    if (codegen_->GetCompilerOptions().CountHotnessInCompiledCode()) {
      // Bump the hotness counter stored in the current ArtMethod. The
      // counter is a halfword (Lhu/Sh), so only the low 16 bits are kept.
      __ Ld(AT, SP, kCurrentMethodStackOffset);
      __ Lhu(TMP, AT, ArtMethod::HotnessCountOffset().Int32Value());
      __ Addiu(TMP, TMP, 1);
      __ Sh(TMP, AT, ArtMethod::HotnessCountOffset().Int32Value());
    }
    // Loop back edge: check for a pending suspend request before branching.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    // Method entry: emit the suspend check that precedes this goto.
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(block, successor)) {
    // Only branch when the successor is not laid out immediately after.
    __ Bc(codegen_->GetLabelOf(successor));
  }
}
3632
// An unconditional branch has no register requirements.
void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
  // Delegate to the shared goto handler (suspend checks, fall-through elision).
  HandleGoto(got, got->GetSuccessor());
}
3640
3641void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
3642 try_boundary->SetLocations(nullptr);
3643}
3644
3645void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
3646 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3647 if (!successor->IsExitBlock()) {
3648 HandleGoto(try_boundary, successor);
3649 }
3650}
3651
// Materializes an integral comparison `lhs cond rhs` as 0/1 into the output
// register. When the right-hand side is a constant, the sequence is chosen
// so that a 16-bit immediate encoding (Slti/Sltiu/Addiu/Xori) can be used
// whenever the constant fits; otherwise the constant is loaded into TMP.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
                                                            bool is64bit,
                                                            LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }
  // Unsigned addition avoids signed-overflow UB when rhs_imm is INT64_MAX.
  int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      if (use_imm && IsInt<16>(-rhs_imm)) {
        if (rhs_imm == 0) {
          // Compare directly against zero.
          if (cond == kCondEQ) {
            __ Sltiu(dst, lhs, 1);
          } else {
            __ Sltu(dst, ZERO, lhs);
          }
        } else {
          // Subtract the constant, then test the difference against zero.
          if (is64bit) {
            __ Daddiu(dst, lhs, -rhs_imm);
          } else {
            __ Addiu(dst, lhs, -rhs_imm);
          }
          if (cond == kCondEQ) {
            __ Sltiu(dst, dst, 1);
          } else {
            __ Sltu(dst, ZERO, dst);
          }
        }
      } else {
        // XOR makes the operands equal iff the result is zero.
        if (use_imm && IsUint<16>(rhs_imm)) {
          __ Xori(dst, lhs, rhs_imm);
        } else {
          if (use_imm) {
            rhs_reg = TMP;
            __ LoadConst64(rhs_reg, rhs_imm);
          }
          __ Xor(dst, lhs, rhs_reg);
        }
        if (cond == kCondEQ) {
          __ Sltiu(dst, dst, 1);
        } else {
          __ Sltu(dst, ZERO, dst);
        }
      }
      break;

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      if (cond == kCondGE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the slt instruction but no sge.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondGT) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the slti instruction but no sgti.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Slt(dst, rhs_reg, lhs);
        if (cond == kCondLE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the slt instruction but no sle.
          __ Xori(dst, dst, 1);
        }
      }
      break;

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      if (cond == kCondAE) {
        // Simulate lhs >= rhs via !(lhs < rhs) since there's
        // only the sltu instruction but no sgeu.
        __ Xori(dst, dst, 1);
      }
      break;

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
        __ Sltiu(dst, lhs, rhs_imm_plus_one);
        if (cond == kCondA) {
          // Simulate lhs > rhs via !(lhs <= rhs) since there's
          // only the sltiu instruction but no sgtiu.
          __ Xori(dst, dst, 1);
        }
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst64(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, rhs_reg, lhs);
        if (cond == kCondBE) {
          // Simulate lhs <= rhs via !(rhs < lhs) since there's
          // only the sltu instruction but no sleu.
          __ Xori(dst, dst, 1);
        }
      }
      break;
  }
}
3807
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02003808bool InstructionCodeGeneratorMIPS64::MaterializeIntLongCompare(IfCondition cond,
3809 bool is64bit,
3810 LocationSummary* input_locations,
3811 GpuRegister dst) {
3812 GpuRegister lhs = input_locations->InAt(0).AsRegister<GpuRegister>();
3813 Location rhs_location = input_locations->InAt(1);
3814 GpuRegister rhs_reg = ZERO;
3815 int64_t rhs_imm = 0;
3816 bool use_imm = rhs_location.IsConstant();
3817 if (use_imm) {
3818 if (is64bit) {
3819 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
3820 } else {
3821 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3822 }
3823 } else {
3824 rhs_reg = rhs_location.AsRegister<GpuRegister>();
3825 }
3826 int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);
3827
3828 switch (cond) {
3829 case kCondEQ:
3830 case kCondNE:
3831 if (use_imm && IsInt<16>(-rhs_imm)) {
3832 if (is64bit) {
3833 __ Daddiu(dst, lhs, -rhs_imm);
3834 } else {
3835 __ Addiu(dst, lhs, -rhs_imm);
3836 }
3837 } else if (use_imm && IsUint<16>(rhs_imm)) {
3838 __ Xori(dst, lhs, rhs_imm);
3839 } else {
3840 if (use_imm) {
3841 rhs_reg = TMP;
3842 __ LoadConst64(rhs_reg, rhs_imm);
3843 }
3844 __ Xor(dst, lhs, rhs_reg);
3845 }
3846 return (cond == kCondEQ);
3847
3848 case kCondLT:
3849 case kCondGE:
3850 if (use_imm && IsInt<16>(rhs_imm)) {
3851 __ Slti(dst, lhs, rhs_imm);
3852 } else {
3853 if (use_imm) {
3854 rhs_reg = TMP;
3855 __ LoadConst64(rhs_reg, rhs_imm);
3856 }
3857 __ Slt(dst, lhs, rhs_reg);
3858 }
3859 return (cond == kCondGE);
3860
3861 case kCondLE:
3862 case kCondGT:
3863 if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
3864 // Simulate lhs <= rhs via lhs < rhs + 1.
3865 __ Slti(dst, lhs, rhs_imm_plus_one);
3866 return (cond == kCondGT);
3867 } else {
3868 if (use_imm) {
3869 rhs_reg = TMP;
3870 __ LoadConst64(rhs_reg, rhs_imm);
3871 }
3872 __ Slt(dst, rhs_reg, lhs);
3873 return (cond == kCondLE);
3874 }
3875
3876 case kCondB:
3877 case kCondAE:
3878 if (use_imm && IsInt<16>(rhs_imm)) {
3879 // Sltiu sign-extends its 16-bit immediate operand before
3880 // the comparison and thus lets us compare directly with
3881 // unsigned values in the ranges [0, 0x7fff] and
3882 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3883 __ Sltiu(dst, lhs, rhs_imm);
3884 } else {
3885 if (use_imm) {
3886 rhs_reg = TMP;
3887 __ LoadConst64(rhs_reg, rhs_imm);
3888 }
3889 __ Sltu(dst, lhs, rhs_reg);
3890 }
3891 return (cond == kCondAE);
3892
3893 case kCondBE:
3894 case kCondA:
3895 if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
3896 // Simulate lhs <= rhs via lhs < rhs + 1.
3897 // Note that this only works if rhs + 1 does not overflow
3898 // to 0, hence the check above.
3899 // Sltiu sign-extends its 16-bit immediate operand before
3900 // the comparison and thus lets us compare directly with
3901 // unsigned values in the ranges [0, 0x7fff] and
3902 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3903 __ Sltiu(dst, lhs, rhs_imm_plus_one);
3904 return (cond == kCondA);
3905 } else {
3906 if (use_imm) {
3907 rhs_reg = TMP;
3908 __ LoadConst64(rhs_reg, rhs_imm);
3909 }
3910 __ Sltu(dst, rhs_reg, lhs);
3911 return (cond == kCondBE);
3912 }
3913 }
3914}
3915
// Emits a branch to `label` taken when `lhs <cond> rhs` holds for int32/int64
// operands, falling through otherwise. The right-hand side may be a register
// or a constant; a constant zero is special-cased with the compact
// compare-with-zero branches.
void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
                                                                     bool is64bit,
                                                                     LocationSummary* locations,
                                                                     Mips64Label* label) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  Location rhs_location = locations->InAt(1);
  GpuRegister rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    if (is64bit) {
      rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
    } else {
      rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
    }
  } else {
    rhs_reg = rhs_location.AsRegister<GpuRegister>();
  }

  if (use_imm && rhs_imm == 0) {
    // Comparison against zero: use the single-operand compact branches.
    // Unsigned comparisons against zero degenerate (B is never true, AE is
    // always true, BE/A collapse to EQ/NE).
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqzc(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnezc(lhs, label);
        break;
      case kCondLT:
        __ Bltzc(lhs, label);
        break;
      case kCondGE:
        __ Bgezc(lhs, label);
        break;
      case kCondLE:
        __ Blezc(lhs, label);
        break;
      case kCondGT:
        __ Bgtzc(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ Bc(label);
        break;
    }
  } else {
    if (use_imm) {
      // Non-zero constant RHS: materialize it into TMP first.
      rhs_reg = TMP;
      __ LoadConst64(rhs_reg, rhs_imm);
    }
    switch (cond) {
      case kCondEQ:
        __ Beqc(lhs, rhs_reg, label);
        break;
      case kCondNE:
        __ Bnec(lhs, rhs_reg, label);
        break;
      case kCondLT:
        __ Bltc(lhs, rhs_reg, label);
        break;
      case kCondGE:
        __ Bgec(lhs, rhs_reg, label);
        break;
      case kCondLE:
        // lhs <= rhs emitted as rhs >= lhs (operands swapped).
        __ Bgec(rhs_reg, lhs, label);
        break;
      case kCondGT:
        // lhs > rhs emitted as rhs < lhs (operands swapped).
        __ Bltc(rhs_reg, lhs, label);
        break;
      case kCondB:
        __ Bltuc(lhs, rhs_reg, label);
        break;
      case kCondAE:
        __ Bgeuc(lhs, rhs_reg, label);
        break;
      case kCondBE:
        __ Bgeuc(rhs_reg, lhs, label);
        break;
      case kCondA:
        __ Bltuc(rhs_reg, lhs, label);
        break;
    }
  }
}
4002
// Materializes the result of a float/double comparison as 0/1 in the core
// output register. The MIPS R6 CMP.cond.fmt instructions write all ones to
// FTMP when the condition holds and all zeros otherwise; Mfc1 + Andi then
// extract bit 0 as a boolean. `gt_bias` selects between the ordered
// (CmpLt/CmpLe) and unordered (CmpUlt/CmpUle) compare variants, which differ
// only in the result produced when an operand is NaN.
void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
                                                       bool gt_bias,
                                                       DataType::Type type,
                                                       LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // Mfc1 yields -1 (all ones) when equal and 0 otherwise; adding 1
        // maps this to 0/1, i.e. the "not equal" result.
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        // lhs > rhs emitted as rhs < lhs (operands swapped); note that the
        // ordered/unordered choice is flipped accordingly.
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // Same -1 + 1 trick as in the float case above.
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
4117
// Emits a float/double comparison into the FPU register `dst` (all ones when
// the emitted condition holds, all zeros otherwise — R6 CMP.cond.fmt
// semantics). `gt_bias` selects the ordered vs. unordered compare variant,
// which differ only for NaN operands.
//
// Returns true when the emitted comparison is the *opposite* of `cond`
// (only kCondNE, which is emitted as an equality compare); the caller must
// then invert its use of `dst`.
bool InstructionCodeGeneratorMIPS64::MaterializeFpCompare(IfCondition cond,
                                                          bool gt_bias,
                                                          DataType::Type type,
                                                          LocationSummary* input_locations,
                                                          FpuRegister dst) {
  FpuRegister lhs = input_locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = input_locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(dst, lhs, rhs);
        return false;
      case kCondNE:
        // No "not equal" compare exists; emit EQ and report inversion.
        __ CmpEqS(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(dst, lhs, rhs);
        } else {
          __ CmpUltS(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(dst, lhs, rhs);
        } else {
          __ CmpUleS(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        // lhs > rhs emitted as rhs < lhs (operands swapped).
        if (gt_bias) {
          __ CmpUltS(dst, rhs, lhs);
        } else {
          __ CmpLtS(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(dst, rhs, lhs);
        } else {
          __ CmpLeS(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(dst, lhs, rhs);
        return false;
      case kCondNE:
        __ CmpEqD(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(dst, lhs, rhs);
        } else {
          __ CmpUltD(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(dst, lhs, rhs);
        } else {
          __ CmpUleD(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(dst, rhs, lhs);
        } else {
          __ CmpLtD(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(dst, rhs, lhs);
        } else {
          __ CmpLeD(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
4208
// Emits a float/double compare followed by a branch to `label` taken when
// `lhs <cond> rhs` holds. The comparison result is produced in FTMP (all
// ones / all zeros) and consumed with Bc1nez/Bc1eqz. `gt_bias` selects the
// ordered vs. unordered compare variant, which determines the outcome when
// an operand is NaN.
void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
                                                                bool gt_bias,
                                                                DataType::Type type,
                                                                LocationSummary* locations,
                                                                Mips64Label* label) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        // NE is EQ with the branch sense inverted (Bc1eqz).
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        // lhs > rhs emitted as rhs < lhs (operands swapped).
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
4311
// Emits the control flow for a boolean test: branches to `true_target` when
// the condition (input `condition_input_index` of `instruction`) holds and
// to `false_target` otherwise. Either target may be null, meaning that the
// corresponding successor is reached by falling through.
void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           Mips64Label* true_target,
                                                           Mips64Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ Bc(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ Bc(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
    } else {
      __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    DataType::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    Mips64Label* branch_target = true_target;

    // With no true target, branch to the false target on the opposite
    // condition instead.
    if (true_target == nullptr) {
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
        break;
      case DataType::Type::kInt64:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ Bc(false_target);
  }
}
4387
4388void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004389 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00004390 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004391 locations->SetInAt(0, Location::RequiresRegister());
4392 }
4393}
4394
4395void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00004396 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
4397 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004398 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004399 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004400 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004401 nullptr : codegen_->GetLabelOf(false_successor);
4402 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004403}
4404
4405void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004406 LocationSummary* locations = new (GetGraph()->GetAllocator())
Alexey Frunze4dda3372015-06-01 18:31:49 -07004407 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01004408 InvokeRuntimeCallingConvention calling_convention;
4409 RegisterSet caller_saves = RegisterSet::Empty();
4410 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4411 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00004412 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004413 locations->SetInAt(0, Location::RequiresRegister());
4414 }
4415}
4416
4417void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08004418 SlowPathCodeMIPS64* slow_path =
4419 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00004420 GenerateTestAndBranch(deoptimize,
4421 /* condition_input_index */ 0,
4422 slow_path->GetEntryLabel(),
4423 /* false_target */ nullptr);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004424}
4425
// This function returns true if a conditional move can be generated for HSelect.
// Otherwise it returns false and HSelect must be implemented in terms of conditional
// branches and regular moves.
//
// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
//
// While determining feasibility of a conditional move and setting inputs/outputs
// are two distinct tasks, this function does both because they share quite a bit
// of common logic.
static bool CanMoveConditionally(HSelect* select, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  HCondition* condition = cond->AsCondition();

  // A materialized condition is always consumed as a 32-bit integer value.
  DataType::Type cond_type =
      materialized ? DataType::Type::kInt32 : condition->InputAt(0)->GetType();
  DataType::Type dst_type = select->GetType();

  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  // The comments in each branch below show the instruction sequence that
  // GenConditionalMove() will emit for that combination.
  if (!cond->IsConstant()) {
    if (!DataType::IsFloatingPointType(cond_type)) {
      if (!DataType::IsFloatingPointType(dst_type)) {
        // Moving int/long on int/long condition.
        if (is_true_value_zero_constant) {
          // seleqz out_reg, false_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez out_reg, true_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_false_in = true;
        } else if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          // selnez AT, true_reg, cond_reg
          // seleqz TMP, false_reg, cond_reg
          // or out_reg, AT, TMP
          can_move_conditionally = true;
        }
      } else {
        // Moving float/double on int/long condition.
        if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          can_move_conditionally = true;
          if (is_true_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // seleqz.fmt out_reg, false_reg, temp_cond_reg
            use_const_for_true_in = true;
          } else if (is_false_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // selnez.fmt out_reg, true_reg, temp_cond_reg
            use_const_for_false_in = true;
          } else {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // sel.fmt temp_cond_reg, false_reg, true_reg
            // mov.fmt out_reg, temp_cond_reg
          }
        }
      }
    } else {
      if (!DataType::IsFloatingPointType(dst_type)) {
        // Moving int/long on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // seleqz out_reg, false_reg, TMP
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // selnez out_reg, true_reg, TMP
          use_const_for_false_in = true;
        } else {
          // mfc1 TMP, temp_cond_reg
          // selnez AT, true_reg, TMP
          // seleqz TMP, false_reg, TMP
          // or out_reg, AT, TMP
        }
      } else {
        // Moving float/double on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // seleqz.fmt out_reg, false_reg, temp_cond_reg
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez.fmt out_reg, true_reg, temp_cond_reg
          use_const_for_false_in = true;
        } else {
          // sel.fmt temp_cond_reg, false_reg, true_reg
          // mov.fmt out_reg, temp_cond_reg
        }
      }
    }
  }

  // At most one of the inputs may be replaced by a zero constant.
  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                DataType::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                DataType::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }

    if (can_move_conditionally) {
      locations_to_set->SetOut(DataType::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    } else {
      // Branch-based fallback: the output aliases the false-value input.
      locations_to_set->SetOut(Location::SameAsFirstInput());
    }
  }

  return can_move_conditionally;
}
4573
4574
// Emits the branchless conditional-move sequence for an HSelect that
// CanMoveConditionally() approved (see the sequences documented there).
// The condition is materialized into cond_reg (core) or fcond_reg (FPU);
// `cond_inverted` records that the materialized value is the negation of
// the select condition, which swaps the seleqz/selnez roles below.
void InstructionCodeGeneratorMIPS64::GenConditionalMove(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  GpuRegister cond_reg = TMP;
  FpuRegister fcond_reg = FTMP;
  DataType::Type cond_type = DataType::Type::kInt32;
  bool cond_inverted = false;
  DataType::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // Already materialized: the boolean is in the third input register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<GpuRegister>();
  } else {
    // Materialize the condition into TMP/FTMP now.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ false,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kInt64:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ true,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        cond_inverted = MaterializeFpCompare(if_cond,
                                             condition->IsGtBias(),
                                             cond_type,
                                             cond_locations,
                                             fcond_reg);
        break;
    }
  }

  // Constant inputs were only allowed by CanMoveConditionally() when they
  // are the zero bit pattern (so no register is needed for them).
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      // Integer destination.
      if (DataType::IsFloatingPointType(cond_type)) {
        // Move the FP comparison result into the core condition register.
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else {
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        }
        // Exactly one of AT/TMP is non-zero; combine into the destination.
        __ Or(dst.AsRegister<GpuRegister>(), AT, TMP);
      }
      break;
    case DataType::Type::kFloat32: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // sel.fmt overwrites its condition operand, so select into
        // fcond_reg and then move to the destination.
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
4726
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004727void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004728 LocationSummary* locations = new (GetGraph()->GetAllocator())
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004729 LocationSummary(flag, LocationSummary::kNoCall);
4730 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07004731}
4732
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004733void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
4734 __ LoadFromOffset(kLoadWord,
4735 flag->GetLocations()->Out().AsRegister<GpuRegister>(),
4736 SP,
4737 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07004738}
4739
David Brazdil74eb1b22015-12-14 11:44:01 +00004740void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004741 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004742 CanMoveConditionally(select, locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00004743}
4744
4745void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004746 if (CanMoveConditionally(select, /* locations_to_set */ nullptr)) {
4747 GenConditionalMove(select);
4748 } else {
4749 LocationSummary* locations = select->GetLocations();
4750 Mips64Label false_target;
4751 GenerateTestAndBranch(select,
4752 /* condition_input_index */ 2,
4753 /* true_target */ nullptr,
4754 &false_target);
4755 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
4756 __ Bind(&false_target);
4757 }
David Brazdil74eb1b22015-12-14 11:44:01 +00004758}
4759
void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // Native debug info markers have no inputs and no output; attach an empty
  // LocationSummary.
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
4763
void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // Intentionally empty: no code is emitted for the marker itself.
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
4767
void CodeGeneratorMIPS64::GenerateNop() {
  // Emit a single NOP instruction.
  __ Nop();
}
4771
Alexey Frunze4dda3372015-06-01 18:31:49 -07004772void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08004773 const FieldInfo& field_info) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004774 DataType::Type field_type = field_info.GetFieldType();
Alexey Frunze15958152017-02-09 19:08:30 -08004775 bool object_field_get_with_read_barrier =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004776 kEmitCompilerReadBarrier && (field_type == DataType::Type::kReference);
Vladimir Markoca6fff82017-10-03 14:49:14 +01004777 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
Alexey Frunze15958152017-02-09 19:08:30 -08004778 instruction,
4779 object_field_get_with_read_barrier
4780 ? LocationSummary::kCallOnSlowPath
4781 : LocationSummary::kNoCall);
Alexey Frunzec61c0762017-04-10 13:54:23 -07004782 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4783 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
4784 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004785 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004786 if (DataType::IsFloatingPointType(instruction->GetType())) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004787 locations->SetOut(Location::RequiresFpuRegister());
4788 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08004789 // The output overlaps in the case of an object field get with
4790 // read barriers enabled: we do not want the move to overwrite the
4791 // object's location, as we need it to emit the read barrier.
4792 locations->SetOut(Location::RequiresRegister(),
4793 object_field_get_with_read_barrier
4794 ? Location::kOutputOverlap
4795 : Location::kNoOutputOverlap);
4796 }
4797 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
4798 // We need a temporary register for the read barrier marking slow
4799 // path in CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07004800 if (!kBakerReadBarrierThunksEnableForFields) {
4801 locations->AddTemp(Location::RequiresRegister());
4802 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07004803 }
4804}
4805
// Emits the actual load for an instance/static field get: picks the load
// width from the field type, performs the (possibly implicit) null check,
// emits a read barrier for reference fields, and inserts load-acquire
// barriers for volatile fields.
void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the load width/signedness matching the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      load_type = kLoadUnsignedByte;
      break;
    case DataType::Type::kInt8:
      load_type = kLoadSignedByte;
      break;
    case DataType::Type::kUint16:
      load_type = kLoadUnsignedHalfword;
      break;
    case DataType::Type::kInt16:
      load_type = kLoadSignedHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
      load_type = kLoadWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      load_type = kLoadDoubleword;
      break;
    case DataType::Type::kReference:
      // Compressed 32-bit heap reference, zero-extended to 64 bits.
      load_type = kLoadUnsignedWord;
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!DataType::IsFloatingPointType(type)) {
    DCHECK(dst_loc.IsRegister());
    GpuRegister dst = dst_loc.AsRegister<GpuRegister>();
    if (type == DataType::Type::kReference) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // The thunk configuration needs no temp (see the locations builder).
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadUnsignedWord, dst, obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else {
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    }
  } else {
    DCHECK(dst_loc.IsFpuRegister());
    FpuRegister dst = dst_loc.AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, offset, null_checker);
  }

  // Memory barriers, in the case of references, are handled in the
  // previous switch statement.
  if (is_volatile && (type != DataType::Type::kReference)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
4894
4895void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
4896 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
4897 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004898 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004899 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004900 if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004901 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004902 } else {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004903 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004904 }
4905}
4906
// Emits the store for an instance/static field set: any-store/any-any
// barriers around volatile stores, heap reference poisoning for reference
// stores, and the GC card mark when a write barrier is needed.
void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DataType::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the store width matching the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      store_type = kStoreByte;
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      store_type = kStoreHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
    case DataType::Type::kReference:
      store_type = kStoreWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      store_type = kStoreDoubleword;
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  // Volatile stores are bracketed by barriers (release semantics before,
  // any-any after).
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (value_location.IsConstant()) {
    int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
    __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
  } else {
    if (!DataType::IsFloatingPointType(type)) {
      DCHECK(value_location.IsRegister());
      GpuRegister src = value_location.AsRegister<GpuRegister>();
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, DataType::Type::kReference);
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      DCHECK(value_location.IsFpuRegister());
      FpuRegister src = value_location.AsFpuRegister<FpuRegister>();
      __ StoreFpuToOffset(store_type, src, obj, offset, null_checker);
    }
  }

  // Mark the GC card iff the stored value is a (possibly null) reference.
  if (needs_write_barrier) {
    DCHECK(value_location.IsRegister());
    GpuRegister src = value_location.AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4984
void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Delegate to the common field-get location logic.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4988
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Delegate to the common field-get code generation.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4992
void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Delegate to the common field-set location logic.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4996
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Delegate to the common field-set code generation.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
5000
// Loads the reference at `*(out + offset)` into `out`, where `out` initially
// holds the base object; emits a read barrier when requested. `maybe_temp`
// must be a register for the non-thunk read barrier configurations (see the
// DCHECK below); the slow-path variant also uses it to preserve the base.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    }
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Move(maybe_temp.AsRegister<GpuRegister>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
5039
// Loads the reference at `*(obj + offset)` into `out` (distinct registers),
// emitting a read barrier when requested. Unlike the one-register variant,
// the base object register is preserved, so no save into `maybe_temp` is
// needed on the slow path.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  GpuRegister obj_reg = obj.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
5076
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005077static inline int GetBakerMarkThunkNumber(GpuRegister reg) {
5078 static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 20, "Expecting equal");
5079 if (reg >= V0 && reg <= T2) { // 13 consequtive regs.
5080 return reg - V0;
5081 } else if (reg >= S2 && reg <= S7) { // 6 consequtive regs.
5082 return 13 + (reg - S2);
5083 } else if (reg == S8) { // One more.
5084 return 19;
5085 }
5086 LOG(FATAL) << "Unexpected register " << reg;
5087 UNREACHABLE();
5088}
5089
5090static inline int GetBakerMarkFieldArrayThunkDisplacement(GpuRegister reg, bool short_offset) {
5091 int num = GetBakerMarkThunkNumber(reg) +
5092 (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
5093 return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
5094}
5095
5096static inline int GetBakerMarkGcRootThunkDisplacement(GpuRegister reg) {
5097 return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
5098 BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
5099}
5100
// Loads the GC root at `*(obj + offset)` into the `root` register, emitting
// the read barrier fast/slow path for marking the root when requested.
// `label_low` (if non-null) is bound at the load of the low address half —
// presumably so the instruction can be targeted by PC-relative patching
// (note the 0x5678 placeholder check below; TODO confirm against callers).
void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             GpuRegister obj,
                                                             uint32_t offset,
                                                             ReadBarrierOption read_barrier_option,
                                                             Mips64Label* label_low) {
  if (label_low != nullptr) {
    // Callers supplying a label must use the 0x5678 placeholder offset.
    DCHECK_EQ(offset, 0x5678u);
  }
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        // temp = Thread::Current()->pReadBarrierMarkReg00
        // // AKA &art_quick_read_barrier_mark_introspection.
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // if (temp != nullptr) {
        //   temp = &gc_root_thunk<root_reg>
        //   root = temp(root)
        // }

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lwu.
        bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
        GpuRegister base = short_offset ? obj : TMP;
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
        if (!short_offset) {
          // Materialize the high half of the offset in TMP for the load below.
          DCHECK(!label_low);
          __ Daui(base, obj, offset_high);
        }
        Mips64Label skip_call;
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        if (label_low != nullptr) {
          DCHECK(short_offset);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, base, offset_low);  // Single instruction
                                                                           // in delay slot.
        __ Jialc(T9, thunk_disp);
        __ Bind(&skip_call);
      } else {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark entry point corresponding
        // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
        // is false, and vice versa.
        //
        // GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
        // if (temp != null) {
        //   root = temp(root)
        // }

        if (label_low != nullptr) {
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
        static_assert(
            sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
            "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
            "have different sizes.");
        static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                      "art::mirror::CompressedReference<mirror::Object> and int32_t "
                      "have different sizes.");

        // Slow path marking the GC root `root`.
        Location temp = Location::RegisterLocation(T9);
        SlowPathCodeMIPS64* slow_path =
            new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(
                instruction,
                root,
                /*entrypoint*/ temp);
        codegen_->AddSlowPath(slow_path);

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(root.reg() - 1);
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, temp.AsRegister<GpuRegister>(), TR, entry_point_offset);
        __ Bnezc(temp.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
        __ Bind(slow_path->GetExitLabel());
      }
    } else {
      if (label_low != nullptr) {
        __ Bind(label_low);
      }
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Daddiu64(root_reg, obj, static_cast<int32_t>(offset));
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    if (label_low != nullptr) {
      __ Bind(label_low);
    }
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
5225
// Loads the heap reference at `*(obj + offset)` into `ref` with a Baker read
// barrier. Uses the thunk-based fast path when enabled for fields, otherwise
// falls through to the generic Baker reference load. `temp` must be invalid
// in the thunk configuration (no temp was reserved) and valid otherwise.
void CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t offset,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (kBakerReadBarrierThunksEnableForFields) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    // temp = Thread::Current()->pReadBarrierMarkReg00
    // // AKA &art_quick_read_barrier_mark_introspection.
    // if (temp != nullptr) {
    //    temp = &field_array_thunk<holder_reg>
    //    temp()
    // }
    // not_gray_return_address:
    //   // If the offset is too large to fit into the lw instruction, we
    //   // use an adjusted base register (TMP) here. This register
    //   // receives bits 16 ... 31 of the offset before the thunk invocation
    //   // and the thunk benefits from it.
    //   HeapReference<mirror::Object> reference = *(obj+offset);  // Original reference load.
    // gray_return_address:

    DCHECK(temp.IsInvalid());
    bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // There may have or may have not been a null check if the field offset is smaller than
    // the page size.
    // There must've been a null check in case it's actually a load from an array.
    // We will, however, perform an explicit null check in the thunk as it's easier to
    // do it than not.
    if (instruction->IsArrayGet()) {
      DCHECK(!needs_null_check);
    }
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    Mips64Label skip_call;
    if (short_offset) {
      // Compact branch: the following Nop occupies the forbidden slot.
      __ Beqzc(T9, &skip_call, /* is_bare */ true);
      __ Nop();  // In forbidden slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);  // Single instruction.
    } else {
      int16_t offset_low = Low16Bits(offset);
      int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign extension in lwu.
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Daui(TMP, obj, offset_high);  // In delay slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset_low);  // Single instruction.
    }
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}
5315
// Emits a Baker-style read barrier guarded load of an array element:
//   /* HeapReference<Object> */ ref = *(obj + data_offset + (index << TIMES_4))
// When `kBakerReadBarrierThunksEnableForArrays` is set, this emits an inline
// fast path that conditionally calls a mark thunk (see the big comment inside);
// otherwise it falls back to the generic
// GenerateReferenceLoadWithBakerReadBarrier() slow-path-based scheme.
// `temp` must be invalid in the thunk case and `needs_null_check` must be
// false there (a null check must have been done earlier).
void CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // Array elements are 32-bit heap references, hence the fixed TIMES_4 scale.
  ScaleFactor scale_factor = TIMES_4;

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not.  Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // The element address is pre-calculated in the TMP register before the
    //     // thunk invocation and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    Mips64Label skip_call;
    // Bare branch: its delay slot is filled with the address computation below.
    __ Beqz(T9, &skip_call, /* is_bare */ true);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    GpuRegister index_reg = index.AsRegister<GpuRegister>();
    __ Dlsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
    __ Jialc(T9, thunk_disp);
    __ Bind(&skip_call);
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    // The thunk expects the reference load to be exactly one instruction so it
    // can compute the return addresses around it.
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
5391
// Generic slow-path-based Baker read barrier reference load:
// loads the holder's lock word first, performs the reference load
// (field access when `index` is invalid, array/Unsafe access otherwise),
// and branches to a mark slow path when the read barrier state bit in the
// lock word indicates a gray object.  With `always_update_field` set
// (UnsafeCASObject), the slow path also writes the possibly-moved reference
// back to `*(obj + field_offset)`; that mode requires offset == 0 and
// scale_factor == TIMES_1, with the field offset passed via `index`.
void CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    GpuRegister obj,
                                                                    uint32_t offset,
                                                                    Location index,
                                                                    ScaleFactor scale_factor,
                                                                    Location temp,
                                                                    bool needs_null_check,
                                                                    bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
  GpuRegister temp_reg = temp.AsRegister<GpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    // The monitor load above doubles as the implicit null check.
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, computed_offset);
    } else {
      GpuRegister index_reg = index.AsRegister<GpuRegister>();
      if (scale_factor == TIMES_1) {
        // DLSA does not support a shift amount of 0; use a plain add.
        __ Daddu(TMP, index_reg, obj);
      } else {
        __ Dlsa(TMP, index_reg, obj, scale_factor);
      }
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS64* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register. Thus `offset` and `scale_factor`
    // above are expected to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetScopedAllocator())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(instruction,
                                                    ref,
                                                    obj,
                                                    /* field_offset */ index,
                                                    temp_reg);
  } else {
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltzc(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5497
5498void CodeGeneratorMIPS64::GenerateReadBarrierSlow(HInstruction* instruction,
5499 Location out,
5500 Location ref,
5501 Location obj,
5502 uint32_t offset,
5503 Location index) {
5504 DCHECK(kEmitCompilerReadBarrier);
5505
5506 // Insert a slow path based read barrier *after* the reference load.
5507 //
5508 // If heap poisoning is enabled, the unpoisoning of the loaded
5509 // reference will be carried out by the runtime within the slow
5510 // path.
5511 //
5512 // Note that `ref` currently does not get unpoisoned (when heap
5513 // poisoning is enabled), which is alright as the `ref` argument is
5514 // not used by the artReadBarrierSlow entry point.
5515 //
5516 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
Vladimir Marko174b2e22017-10-12 13:34:49 +01005517 SlowPathCodeMIPS64* slow_path = new (GetScopedAllocator())
Alexey Frunze15958152017-02-09 19:08:30 -08005518 ReadBarrierForHeapReferenceSlowPathMIPS64(instruction, out, ref, obj, offset, index);
5519 AddSlowPath(slow_path);
5520
5521 __ Bc(slow_path->GetEntryLabel());
5522 __ Bind(slow_path->GetExitLabel());
5523}
5524
5525void CodeGeneratorMIPS64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5526 Location out,
5527 Location ref,
5528 Location obj,
5529 uint32_t offset,
5530 Location index) {
5531 if (kEmitCompilerReadBarrier) {
5532 // Baker's read barriers shall be handled by the fast path
5533 // (CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier).
5534 DCHECK(!kUseBakerReadBarrier);
5535 // If heap poisoning is enabled, unpoisoning will be taken care of
5536 // by the runtime within the slow path.
5537 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
5538 } else if (kPoisonHeapReferences) {
5539 __ UnpoisonHeapReference(out.AsRegister<GpuRegister>());
5540 }
5541}
5542
5543void CodeGeneratorMIPS64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5544 Location out,
5545 Location root) {
5546 DCHECK(kEmitCompilerReadBarrier);
5547
5548 // Insert a slow path based read barrier *after* the GC root load.
5549 //
5550 // Note that GC roots are not affected by heap poisoning, so we do
5551 // not need to do anything special for this here.
5552 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01005553 new (GetScopedAllocator()) ReadBarrierForRootSlowPathMIPS64(instruction, out, root);
Alexey Frunze15958152017-02-09 19:08:30 -08005554 AddSlowPath(slow_path);
5555
5556 __ Bc(slow_path->GetEntryLabel());
5557 __ Bind(slow_path->GetExitLabel());
5558}
5559
Alexey Frunze4dda3372015-06-01 18:31:49 -07005560void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005561 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5562 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07005563 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005564 switch (type_check_kind) {
5565 case TypeCheckKind::kExactCheck:
5566 case TypeCheckKind::kAbstractClassCheck:
5567 case TypeCheckKind::kClassHierarchyCheck:
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005568 case TypeCheckKind::kArrayObjectCheck: {
5569 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
5570 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
5571 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005572 break;
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005573 }
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005574 case TypeCheckKind::kArrayCheck:
5575 case TypeCheckKind::kUnresolvedCheck:
5576 case TypeCheckKind::kInterfaceCheck:
5577 call_kind = LocationSummary::kCallOnSlowPath;
5578 break;
Vladimir Marko175e7862018-03-27 09:03:13 +00005579 case TypeCheckKind::kBitstringCheck:
5580 break;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005581 }
5582
Vladimir Markoca6fff82017-10-03 14:49:14 +01005583 LocationSummary* locations =
5584 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07005585 if (baker_read_barrier_slow_path) {
5586 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
5587 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005588 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko175e7862018-03-27 09:03:13 +00005589 if (type_check_kind == TypeCheckKind::kBitstringCheck) {
5590 locations->SetInAt(1, Location::ConstantLocation(instruction->InputAt(1)->AsConstant()));
5591 locations->SetInAt(2, Location::ConstantLocation(instruction->InputAt(2)->AsConstant()));
5592 locations->SetInAt(3, Location::ConstantLocation(instruction->InputAt(3)->AsConstant()));
5593 } else {
5594 locations->SetInAt(1, Location::RequiresRegister());
5595 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005596 // The output does overlap inputs.
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01005597 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07005598 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08005599 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005600}
5601
// Code generation for HInstanceOf: materializes a 0/1 result in `out`
// according to the type-check kind.  Simple kinds are emitted inline
// (exact compare, super-class walk, array-object check, bitstring compare);
// the remaining kinds defer to TypeCheckSlowPathMIPS64.
void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location cls = locations->InAt(1);
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  Mips64Label done;
  SlowPathCodeMIPS64* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Classes must be equal for the instanceof to succeed.
      // out = (out ^ cls) == 0 ? 1 : 0, computed branchlessly via XOR + SLTIU.
      __ Xor(out, out, cls.AsRegister<GpuRegister>());
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ Bnec(out, cls.AsRegister<GpuRegister>(), &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop, success;
      __ Bind(&loop);
      __ Beqc(out, cls.AsRegister<GpuRegister>(), &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      __ Bnezc(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      ReadBarrierOption read_barrier_option =
          CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        read_barrier_option);
      // Do an exact check.
      Mips64Label success;
      __ Beqc(out, cls.AsRegister<GpuRegister>(), &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       read_barrier_option);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqzc(out, &done);
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      // out = (primitive_type == kPrimNot) ? 1 : 0.
      __ Sltiu(out, out, 1);
      __ Bc(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bnec(out, cls.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kBitstringCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);

      // Leaves zero in `out` on a bitstring match; invert into a 0/1 result.
      GenerateBitstringTypeCheckCompare(instruction, out);
      __ Sltiu(out, out, 1);
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
5797
5798void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005799 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005800 locations->SetOut(Location::ConstantLocation(constant));
5801}
5802
// No code emitted here: the constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
5806
5807void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005808 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005809 locations->SetOut(Location::ConstantLocation(constant));
5810}
5811
// No code emitted here: the constant is materialized at each use site.
void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
5815
Calin Juravle175dc732015-08-25 15:42:32 +01005816void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
5817 // The trampoline uses the same calling convention as dex calling conventions,
5818 // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
5819 // the method_idx.
5820 HandleInvoke(invoke);
5821}
5822
// Delegates to the architecture-independent unresolved-invoke trampoline call.
void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
5826
Alexey Frunze4dda3372015-06-01 18:31:49 -07005827void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
5828 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
5829 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
5830}
5831
// Location setup for interface calls: standard invoke locations plus a fixed
// temp for the hidden argument expected by the IMT conflict trampoline.
void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // The register T0 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
}
5838
// Code generation for interface calls: loads the receiver's class, indexes
// into its IMT to find the ArtMethod*, and performs an indirect call through
// T9, passing the dex method index as a hidden argument for the conflict
// trampoline.
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument (dex method index, consumed by the IMT conflict
  // trampoline when two interface methods hash to the same IMT slot).
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // The class load above doubles as the implicit null check on the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();  // Fill the branch delay slot of JALR.
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
5880
5881void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07005882 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5883 if (intrinsic.TryDispatch(invoke)) {
5884 return;
5885 }
5886
Alexey Frunze4dda3372015-06-01 18:31:49 -07005887 HandleInvoke(invoke);
5888}
5889
5890void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005891 // Explicit clinit checks triggered by static invokes must have been pruned by
5892 // art::PrepareForRegisterAllocation.
5893 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005894
Chris Larsen3039e382015-08-26 07:54:08 -07005895 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5896 if (intrinsic.TryDispatch(invoke)) {
5897 return;
5898 }
5899
Alexey Frunze4dda3372015-06-01 18:31:49 -07005900 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005901}
5902
Orion Hodsonac141392017-01-13 11:53:47 +00005903void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
5904 HandleInvoke(invoke);
5905}
5906
// Delegates to the architecture-independent invoke-polymorphic call helper.
void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
5910
Orion Hodson4c8e12e2018-05-18 08:33:20 +01005911void LocationsBuilderMIPS64::VisitInvokeCustom(HInvokeCustom* invoke) {
5912 HandleInvoke(invoke);
5913}
5914
// Delegates to the architecture-independent invoke-custom call helper.
void InstructionCodeGeneratorMIPS64::VisitInvokeCustom(HInvokeCustom* invoke) {
  codegen_->GenerateInvokeCustomCall(invoke);
}
5918
Chris Larsen3039e382015-08-26 07:54:08 -07005919static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005920 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07005921 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
5922 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005923 return true;
5924 }
5925 return false;
5926}
5927
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005928HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
Alexey Frunzef63f5692016-12-13 17:43:11 -08005929 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005930 bool fallback_load = false;
5931 switch (desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005932 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005933 case HLoadString::LoadKind::kBootImageRelRo:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005934 case HLoadString::LoadKind::kBssEntry:
5935 DCHECK(!Runtime::Current()->UseJitCompilation());
5936 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005937 case HLoadString::LoadKind::kJitTableAddress:
5938 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005939 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005940 case HLoadString::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005941 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko764d4542017-05-16 10:31:41 +01005942 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005943 }
5944 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005945 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005946 }
5947 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005948}
5949
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005950HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
5951 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005952 bool fallback_load = false;
5953 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005954 case HLoadClass::LoadKind::kInvalid:
5955 LOG(FATAL) << "UNREACHABLE";
5956 UNREACHABLE();
Alexey Frunzef63f5692016-12-13 17:43:11 -08005957 case HLoadClass::LoadKind::kReferrersClass:
5958 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005959 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoe47f60c2018-02-21 13:43:28 +00005960 case HLoadClass::LoadKind::kBootImageRelRo:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005961 case HLoadClass::LoadKind::kBssEntry:
5962 DCHECK(!Runtime::Current()->UseJitCompilation());
5963 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005964 case HLoadClass::LoadKind::kJitTableAddress:
5965 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005966 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005967 case HLoadClass::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005968 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005969 break;
5970 }
5971 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005972 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005973 }
5974 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005975}
5976
// Returns the dispatch info that will actually be used for a static/direct
// invoke. MIPS64 places no restrictions, so the desired info is returned as-is.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
    const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
    HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On MIPS64 we support all dispatch types.
  return desired_dispatch_info;
}
5983
// Emits a static or direct call: first materializes the callee ArtMethod (or
// delegates entirely to the runtime for kRuntimeCall), then emits the actual
// call per the invoke's code-pointer location, and records the PC for stack maps.
void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: the current method is already available as an input.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      // Emit a paired high/low PC-relative patch; the 0x5678 immediate is a
      // placeholder rewritten by the linker.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          NewBootImageMethodPatch(invoke->GetTargetMethod());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          NewBootImageMethodPatch(invoke->GetTargetMethod(), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // Known method address: load it from the literal pool.
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadDoubleword,
                     DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageRelRo: {
      uint32_t boot_image_offset = GetBootImageOffset(invoke);
      PcRelativePatchInfo* info_high = NewBootImageRelRoPatch(boot_image_offset);
      PcRelativePatchInfo* info_low = NewBootImageRelRoPatch(boot_image_offset, info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
      __ Lwu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from the .bss entry resolved at runtime.
      PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
      PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursive call: branch-and-link straight to our own frame entry.
      __ Balc(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9()
      __ Jalr(T9);
      __ Nop();  // Fill the Jalr delay slot.
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
6064
6065void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00006066 // Explicit clinit checks triggered by static invokes must have been pruned by
6067 // art::PrepareForRegisterAllocation.
6068 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006069
6070 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
6071 return;
6072 }
6073
6074 LocationSummary* locations = invoke->GetLocations();
6075 codegen_->GenerateStaticOrDirectCall(invoke,
6076 locations->HasTemps()
6077 ? locations->GetTemp(0)
6078 : Location::NoLocation());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006079}
6080
// Emits a virtual call: loads the receiver's class, fetches the target
// ArtMethod from the embedded vtable, and jumps through its quick-code entry
// point, recording the PC for stack maps.
void CodeGeneratorMIPS64::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  GpuRegister receiver = calling_convention.GetRegisterAt(0);

  GpuRegister temp = temp_location.AsRegister<GpuRegister>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // temp = object->GetClass();
  // The class field is loaded as a 32-bit (unsigned word) value.
  __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
  // The class load above may fault on a null receiver; record it as the
  // implicit null check for this invoke.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ Nop();  // Fill the Jalr delay slot.
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
6116
6117void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
6118 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
6119 return;
6120 }
6121
6122 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006123 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006124}
6125
// Builds the location summary for HLoadClass, choosing registers and
// caller-save sets according to the load kind and read-barrier configuration.
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Fully delegated to the runtime: both the input and the output live in
    // the first runtime-call argument register.
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // kReferrersClass reads the declaring class out of the current method.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
6160
// Emits code for HLoadClass. Each load kind materializes the class reference
// differently (current method, PC-relative patch, literal, .bss entry or JIT
// table); a slow path is appended when a null check or clinit check is needed.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();
  GpuRegister current_method_reg = ZERO;
  if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
      load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
  }

  // Boot-image classes are loaded without a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass:
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // Paired high/low patches; 0x5678 is a placeholder fixed up by the linker.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewBootImageTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewBootImageTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // The boot image is known to live in the low 4GiB, so the address fits
      // in 32 bits (checked by the cast below).
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      uint32_t boot_image_offset = codegen_->GetBootImageOffset(cls);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewBootImageRelRoPatch(boot_image_offset);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewBootImageRelRoPatch(boot_image_offset, info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      // 32-bit load: the .data.bimg.rel.ro entry is a 32-bit value.
      __ Lwu(out, AT, /* placeholder */ 0x5678);
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // Load the GC root from the type's .bss entry; a null result means the
      // type is unresolved and the slow path must be taken.
      CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high, out);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out,
                              /* placeholder */ 0x5678,
                              read_barrier_option,
                              &info_low->label);
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress:
      // Load the root out of the JIT class table entry for this class.
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
                                                          cls->GetTypeIndex(),
                                                          cls->GetClass()));
      GenerateGcRootFieldLoad(cls, out_loc, out, 0, read_barrier_option);
      break;
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      // kRuntimeCall was handled by the early return above.
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Beqzc(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      // The clinit check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
6271
Orion Hodsondbaa5c72018-05-10 08:22:46 +01006272void LocationsBuilderMIPS64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
6273 InvokeRuntimeCallingConvention calling_convention;
6274 Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6275 CodeGenerator::CreateLoadMethodHandleRuntimeCallLocationSummary(load, loc, loc);
6276}
6277
// HLoadMethodHandle is always lowered to a runtime call.
void InstructionCodeGeneratorMIPS64::VisitLoadMethodHandle(HLoadMethodHandle* load) {
  codegen_->GenerateLoadMethodHandleRuntimeCall(load);
}
6281
Orion Hodson18259d72018-04-12 11:18:23 +01006282void LocationsBuilderMIPS64::VisitLoadMethodType(HLoadMethodType* load) {
6283 InvokeRuntimeCallingConvention calling_convention;
6284 Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
6285 CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, loc, loc);
6286}
6287
// HLoadMethodType is always lowered to a runtime call.
void InstructionCodeGeneratorMIPS64::VisitLoadMethodType(HLoadMethodType* load) {
  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
6291
// Byte offset of the pending-exception field within the Thread object,
// addressed off the TR (thread) register by the exception load/clear code below.
static int32_t GetExceptionTlsOffset() {
  return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
}
6295
Alexey Frunze4dda3372015-06-01 18:31:49 -07006296void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
6297 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006298 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006299 locations->SetOut(Location::RequiresRegister());
6300}
6301
6302void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
6303 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
David Brazdilcb1c0552015-08-04 16:22:25 +01006304 __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
6305}
6306
// Clearing the exception needs no inputs, outputs or runtime call.
void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6310
void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Store a 32-bit zero over the pending-exception field in the Thread object.
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
6314
// Builds the location summary for HLoadString, mirroring the HLoadClass
// builder: runtime-call kinds pin the output to the calling convention,
// other kinds take any register; kBssEntry tailors the slow-path saves.
void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
  if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
    // The runtime call returns the string in the first argument register.
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barriers we have a temp-clobbering call.
      }
    }
  }
}
6337
// Emits code for HLoadString. Each handled load kind returns directly; the
// fall-through at the bottom handles kRuntimeCall via pResolveString.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  GpuRegister out = out_loc.AsRegister<GpuRegister>();

  switch (load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      // Paired high/low patches; 0x5678 is a placeholder fixed up by the linker.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewBootImageStringPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewBootImageStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(out, AT, /* placeholder */ 0x5678);
      return;
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      // The boot image lives in the low 4GiB, so the address fits in 32 bits
      // (checked by the cast below).
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      return;
    }
    case HLoadString::LoadKind::kBootImageRelRo: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      uint32_t boot_image_offset = codegen_->GetBootImageOffset(load);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewBootImageRelRoPatch(boot_image_offset);
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewBootImageRelRoPatch(boot_image_offset, info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      // 32-bit load: the .data.bimg.rel.ro entry is a 32-bit value.
      __ Lwu(out, AT, /* placeholder */ 0x5678);
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the GC root from the string's .bss entry; a null result means the
      // string is unresolved and the slow path must resolve it.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, out);
      GenerateGcRootFieldLoad(load,
                              out_loc,
                              out,
                              /* placeholder */ 0x5678,
                              kCompilerReadBarrierOption,
                              &info_low->label);
      SlowPathCodeMIPS64* slow_path =
          new (codegen_->GetScopedAllocator()) LoadStringSlowPathMIPS64(load);
      codegen_->AddSlowPath(slow_path);
      __ Beqzc(out, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress:
      // Load the root out of the JIT string table entry for this string.
      __ LoadLiteral(out,
                     kLoadUnsignedWord,
                     codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
                                                           load->GetStringIndex(),
                                                           load->GetString()));
      GenerateGcRootFieldLoad(load, out_loc, out, 0, kCompilerReadBarrierOption);
      return;
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
  __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
6417
Alexey Frunze4dda3372015-06-01 18:31:49 -07006418void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006419 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006420 locations->SetOut(Location::ConstantLocation(constant));
6421}
6422
void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
6426
6427void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006428 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6429 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006430 InvokeRuntimeCallingConvention calling_convention;
6431 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6432}
6433
6434void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006435 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07006436 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01006437 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006438 if (instruction->IsEnter()) {
6439 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6440 } else {
6441 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6442 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006443}
6444
6445void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
6446 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006447 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006448 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006449 case DataType::Type::kInt32:
6450 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006451 locations->SetInAt(0, Location::RequiresRegister());
6452 locations->SetInAt(1, Location::RequiresRegister());
6453 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6454 break;
6455
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006456 case DataType::Type::kFloat32:
6457 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006458 locations->SetInAt(0, Location::RequiresFpuRegister());
6459 locations->SetInAt(1, Location::RequiresFpuRegister());
6460 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6461 break;
6462
6463 default:
6464 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
6465 }
6466}
6467
6468void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006469 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006470 LocationSummary* locations = instruction->GetLocations();
6471
6472 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006473 case DataType::Type::kInt32:
6474 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006475 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6476 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
6477 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006478 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006479 __ MulR6(dst, lhs, rhs);
6480 else
6481 __ Dmul(dst, lhs, rhs);
6482 break;
6483 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006484 case DataType::Type::kFloat32:
6485 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006486 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6487 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
6488 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006489 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006490 __ MulS(dst, lhs, rhs);
6491 else
6492 __ MulD(dst, lhs, rhs);
6493 break;
6494 }
6495 default:
6496 LOG(FATAL) << "Unexpected mul type " << type;
6497 }
6498}
6499
6500void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
6501 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006502 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006503 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006504 case DataType::Type::kInt32:
6505 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006506 locations->SetInAt(0, Location::RequiresRegister());
6507 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6508 break;
6509
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006510 case DataType::Type::kFloat32:
6511 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006512 locations->SetInAt(0, Location::RequiresFpuRegister());
6513 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6514 break;
6515
6516 default:
6517 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
6518 }
6519}
6520
6521void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006522 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006523 LocationSummary* locations = instruction->GetLocations();
6524
6525 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006526 case DataType::Type::kInt32:
6527 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006528 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6529 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006530 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006531 __ Subu(dst, ZERO, src);
6532 else
6533 __ Dsubu(dst, ZERO, src);
6534 break;
6535 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006536 case DataType::Type::kFloat32:
6537 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006538 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6539 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006540 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006541 __ NegS(dst, src);
6542 else
6543 __ NegD(dst, src);
6544 break;
6545 }
6546 default:
6547 LOG(FATAL) << "Unexpected neg type " << type;
6548 }
6549}
6550
6551void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006552 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6553 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006554 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006555 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006556 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6557 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006558}
6559
6560void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08006561 // Note: if heap poisoning is enabled, the entry point takes care
6562 // of poisoning the reference.
Goran Jakovljevic854df412017-06-27 14:41:39 +02006563 QuickEntrypointEnum entrypoint =
6564 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
6565 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006566 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Goran Jakovljevic854df412017-06-27 14:41:39 +02006567 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006568}
6569
6570void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006571 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6572 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006573 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00006574 if (instruction->IsStringAlloc()) {
6575 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
6576 } else {
6577 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00006578 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006579 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006580}
6581
void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
    MemberOffset code_offset =
        ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
    // Load the NewEmptyString method from the thread's entry points (TR is
    // the thread register), then load its compiled-code entry and call it.
    __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ Nop();
    // Record the PC manually since this call bypasses InvokeRuntime.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    // Non-string allocations go through the regular allocation entry point.
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
6600
6601void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006602 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006603 locations->SetInAt(0, Location::RequiresRegister());
6604 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6605}
6606
6607void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006608 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006609 LocationSummary* locations = instruction->GetLocations();
6610
6611 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006612 case DataType::Type::kInt32:
6613 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006614 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6615 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6616 __ Nor(dst, src, ZERO);
6617 break;
6618 }
6619
6620 default:
6621 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
6622 }
6623}
6624
6625void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006626 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006627 locations->SetInAt(0, Location::RequiresRegister());
6628 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6629}
6630
6631void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
6632 LocationSummary* locations = instruction->GetLocations();
6633 __ Xori(locations->Out().AsRegister<GpuRegister>(),
6634 locations->InAt(0).AsRegister<GpuRegister>(),
6635 1);
6636}
6637
6638void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01006639 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
6640 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006641}
6642
void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // If a subsequent user instruction will fault on null anyway, skip the
  // explicit probe here.
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Probe the object by loading from offset 0 into ZERO (value discarded);
  // a null reference triggers a fault that the runtime turns into an NPE.
  __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
  // Record the PC so the fault handler can map the faulting address back to
  // this null check.
  RecordPcInfo(instruction, instruction->GetDexPc());
}
6652
Calin Juravle2ae48182016-03-16 14:05:09 +00006653void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006654 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01006655 new (GetScopedAllocator()) NullCheckSlowPathMIPS64(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00006656 AddSlowPath(slow_path);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006657
6658 Location obj = instruction->GetLocations()->InAt(0);
6659
6660 __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
6661}
6662
6663void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00006664 codegen_->GenerateNullCheck(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006665}
6666
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  // Bitwise OR shares the generic binary-op location logic.
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  // Bitwise OR shares the generic binary-op code generation.
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  // Parallel moves are created by the register allocator with their
  // locations already fixed; the builder must never see one.
  LOG(FATAL) << "Unreachable";
}
6678
6679void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
Vladimir Markobea75ff2017-10-11 20:39:54 +01006680 if (instruction->GetNext()->IsSuspendCheck() &&
6681 instruction->GetBlock()->GetLoopInformation() != nullptr) {
6682 HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
6683 // The back edge will generate the suspend check.
6684 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
6685 }
6686
Alexey Frunze4dda3372015-06-01 18:31:49 -07006687 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
6688}
6689
6690void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006691 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006692 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
6693 if (location.IsStackSlot()) {
6694 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6695 } else if (location.IsDoubleStackSlot()) {
6696 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6697 }
6698 locations->SetOut(location);
6699}
6700
6701void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
6702 ATTRIBUTE_UNUSED) {
6703 // Nothing to do, the parameter is already at its location.
6704}
6705
6706void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
6707 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006708 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006709 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
6710}
6711
6712void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
6713 ATTRIBUTE_UNUSED) {
6714 // Nothing to do, the method is already at its location.
6715}
6716
6717void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006718 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01006719 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006720 locations->SetInAt(i, Location::Any());
6721 }
6722 locations->SetOut(Location::Any());
6723}
6724
6725void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
6726 LOG(FATAL) << "Unreachable";
6727}
6728
6729void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006730 DataType::Type type = rem->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006731 LocationSummary::CallKind call_kind =
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006732 DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
6733 : LocationSummary::kNoCall;
Vladimir Markoca6fff82017-10-03 14:49:14 +01006734 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006735
6736 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006737 case DataType::Type::kInt32:
6738 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006739 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07006740 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006741 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6742 break;
6743
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006744 case DataType::Type::kFloat32:
6745 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006746 InvokeRuntimeCallingConvention calling_convention;
6747 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
6748 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
6749 locations->SetOut(calling_convention.GetReturnLocation(type));
6750 break;
6751 }
6752
6753 default:
6754 LOG(FATAL) << "Unexpected rem type " << type;
6755 }
6756}
6757
6758void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006759 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006760
6761 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006762 case DataType::Type::kInt32:
6763 case DataType::Type::kInt64:
Alexey Frunzec857c742015-09-23 15:12:39 -07006764 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006765 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006766
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006767 case DataType::Type::kFloat32:
6768 case DataType::Type::kFloat64: {
6769 QuickEntrypointEnum entrypoint =
6770 (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
Serban Constantinescufc734082016-07-19 17:18:07 +01006771 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006772 if (type == DataType::Type::kFloat32) {
Roland Levillain888d0672015-11-23 18:53:50 +00006773 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
6774 } else {
6775 CheckEntrypointTypes<kQuickFmod, double, double, double>();
6776 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006777 break;
6778 }
6779 default:
6780 LOG(FATAL) << "Unexpected rem type " << type;
6781 }
6782}
6783
Aart Bik1f8d51b2018-02-15 10:42:37 -08006784static void CreateMinMaxLocations(ArenaAllocator* allocator, HBinaryOperation* minmax) {
6785 LocationSummary* locations = new (allocator) LocationSummary(minmax);
6786 switch (minmax->GetResultType()) {
6787 case DataType::Type::kInt32:
6788 case DataType::Type::kInt64:
6789 locations->SetInAt(0, Location::RequiresRegister());
6790 locations->SetInAt(1, Location::RequiresRegister());
6791 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6792 break;
6793 case DataType::Type::kFloat32:
6794 case DataType::Type::kFloat64:
6795 locations->SetInAt(0, Location::RequiresFpuRegister());
6796 locations->SetInAt(1, Location::RequiresFpuRegister());
6797 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6798 break;
6799 default:
6800 LOG(FATAL) << "Unexpected type for HMinMax " << minmax->GetResultType();
6801 }
6802}
6803
// Emits integer min/max of the two inputs into the output register using
// MIPS64R6 SELEQZ/SELNEZ conditional selects. Clobbers AT.
void InstructionCodeGeneratorMIPS64::GenerateMinMaxInt(LocationSummary* locations, bool is_min) {
  GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();

  if (lhs == rhs) {
    // min(x, x) == max(x, x) == x; just copy if needed.
    if (out != lhs) {
      __ Move(out, lhs);
    }
  } else {
    // Some architectures, such as ARM and MIPS (prior to r6), have a
    // conditional move instruction which only changes the target
    // (output) register if the condition is true (MIPS prior to r6 had
    // MOVF, MOVT, and MOVZ). The SELEQZ and SELNEZ instructions always
    // change the target (output) register. If the condition is true the
    // output register gets the contents of the "rs" register; otherwise,
    // the output register is set to zero. One consequence of this is
    // that to implement something like "rd = c==0 ? rs : rt" MIPS64r6
    // needs to use a pair of SELEQZ/SELNEZ instructions. After
    // executing this pair of instructions one of the output registers
    // from the pair will necessarily contain zero. Then the code ORs the
    // output registers from the SELEQZ/SELNEZ instructions to get the
    // final result.
    //
    // The initial test to see if the output register is same as the
    // first input register is needed to make sure that value in the
    // first input register isn't clobbered before we've finished
    // computing the output value. The logic in the corresponding else
    // clause performs the same task but makes sure the second input
    // register isn't clobbered in the event that it's the same register
    // as the output register; the else clause also handles the case
    // where the output register is distinct from both the first, and the
    // second input registers.
    if (out == lhs) {
      // AT = (rhs < lhs); select into out/AT so lhs is read before the
      // final write to out.
      __ Slt(AT, rhs, lhs);
      if (is_min) {
        __ Seleqz(out, lhs, AT);
        __ Selnez(AT, rhs, AT);
      } else {
        __ Selnez(out, lhs, AT);
        __ Seleqz(AT, rhs, AT);
      }
    } else {
      // AT = (lhs < rhs); safe even if out == rhs, since rhs is read first.
      __ Slt(AT, lhs, rhs);
      if (is_min) {
        __ Seleqz(out, rhs, AT);
        __ Selnez(AT, lhs, AT);
      } else {
        __ Selnez(out, rhs, AT);
        __ Seleqz(AT, lhs, AT);
      }
    }
    // Exactly one of out/AT is zero after the selects; OR merges them.
    __ Or(out, out, AT);
  }
}
6859
// Emits floating-point min/max with Java NaN semantics: if either input is
// NaN, the NaN is returned; otherwise MIPS64R6 MIN.fmt/MAX.fmt is used.
// May use FTMP as scratch.
void InstructionCodeGeneratorMIPS64::GenerateMinMaxFP(LocationSummary* locations,
                                                      bool is_min,
                                                      DataType::Type type) {
  FpuRegister a = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister b = locations->InAt(1).AsFpuRegister<FpuRegister>();
  FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();

  Mips64Label noNaNs;
  Mips64Label done;
  // Use out as scratch when it doesn't alias an input, else fall back to FTMP.
  FpuRegister ftmp = ((out != a) && (out != b)) ? out : FTMP;

  // When Java computes min/max it prefers a NaN to a number; the
  // behavior of MIPSR6 is to prefer numbers to NaNs, i.e., if one of
  // the inputs is a NaN and the other is a valid number, the MIPS
  // instruction will return the number; Java wants the NaN value
  // returned. This is why there is extra logic preceding the use of
  // the MIPS min.fmt/max.fmt instructions. If either a, or b holds a
  // NaN, return the NaN, otherwise return the min/max.
  if (type == DataType::Type::kFloat64) {
    // CMP.UN is true if a and b are unordered (i.e. at least one is NaN).
    __ CmpUnD(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqD(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelD(ftmp, a, b);

    if (ftmp != out) {
      __ MovD(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinD(out, a, b);
    } else {
      __ MaxD(out, a, b);
    }
  } else {
    // Same structure as above, in single precision.
    DCHECK_EQ(type, DataType::Type::kFloat32);
    __ CmpUnS(FTMP, a, b);
    __ Bc1eqz(FTMP, &noNaNs);

    // One of the inputs is a NaN
    __ CmpEqS(ftmp, a, a);
    // If a == a then b is the NaN, otherwise a is the NaN.
    __ SelS(ftmp, a, b);

    if (ftmp != out) {
      __ MovS(out, ftmp);
    }

    __ Bc(&done);

    __ Bind(&noNaNs);

    if (is_min) {
      __ MinS(out, a, b);
    } else {
      __ MaxS(out, a, b);
    }
  }

  __ Bind(&done);
}
6927
Aart Bik351df3e2018-03-07 11:54:57 -08006928void InstructionCodeGeneratorMIPS64::GenerateMinMax(HBinaryOperation* minmax, bool is_min) {
6929 DataType::Type type = minmax->GetResultType();
6930 switch (type) {
6931 case DataType::Type::kInt32:
6932 case DataType::Type::kInt64:
6933 GenerateMinMaxInt(minmax->GetLocations(), is_min);
6934 break;
6935 case DataType::Type::kFloat32:
6936 case DataType::Type::kFloat64:
6937 GenerateMinMaxFP(minmax->GetLocations(), is_min, type);
6938 break;
6939 default:
6940 LOG(FATAL) << "Unexpected type for HMinMax " << type;
6941 }
6942}
6943
void LocationsBuilderMIPS64::VisitMin(HMin* min) {
  // Shared location setup with HMax.
  CreateMinMaxLocations(GetGraph()->GetAllocator(), min);
}

void InstructionCodeGeneratorMIPS64::VisitMin(HMin* min) {
  GenerateMinMax(min, /*is_min*/ true);
}

void LocationsBuilderMIPS64::VisitMax(HMax* max) {
  // Shared location setup with HMin.
  CreateMinMaxLocations(GetGraph()->GetAllocator(), max);
}

void InstructionCodeGeneratorMIPS64::VisitMax(HMax* max) {
  GenerateMinMax(max, /*is_min*/ false);
}
6959
Aart Bik3dad3412018-02-28 12:01:46 -08006960void LocationsBuilderMIPS64::VisitAbs(HAbs* abs) {
6961 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(abs);
6962 switch (abs->GetResultType()) {
6963 case DataType::Type::kInt32:
6964 case DataType::Type::kInt64:
6965 locations->SetInAt(0, Location::RequiresRegister());
6966 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6967 break;
6968 case DataType::Type::kFloat32:
6969 case DataType::Type::kFloat64:
6970 locations->SetInAt(0, Location::RequiresFpuRegister());
6971 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6972 break;
6973 default:
6974 LOG(FATAL) << "Unexpected abs type " << abs->GetResultType();
6975 }
6976}
6977
void InstructionCodeGeneratorMIPS64::VisitAbs(HAbs* abs) {
  LocationSummary* locations = abs->GetLocations();
  switch (abs->GetResultType()) {
    case DataType::Type::kInt32: {
      // Branch-free abs via sign mask: AT = in >> 31 (all ones iff
      // negative), then (in ^ AT) - AT == |in|. Clobbers AT.
      GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      __ Sra(AT, in, 31);
      __ Xor(out, in, AT);
      __ Subu(out, out, AT);
      break;
    }
    case DataType::Type::kInt64: {
      // Same trick in 64 bits; DSRA32 shifts by 32 + 31 = 63, yielding the
      // 64-bit sign mask. Clobbers AT.
      GpuRegister in = locations->InAt(0).AsRegister<GpuRegister>();
      GpuRegister out = locations->Out().AsRegister<GpuRegister>();
      __ Dsra32(AT, in, 31);
      __ Xor(out, in, AT);
      __ Dsubu(out, out, AT);
      break;
    }
    case DataType::Type::kFloat32: {
      // Dedicated FPU absolute-value instruction (single precision).
      FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      __ AbsS(out, in);
      break;
    }
    case DataType::Type::kFloat64: {
      // Dedicated FPU absolute-value instruction (double precision).
      FpuRegister in = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister out = locations->Out().AsFpuRegister<FpuRegister>();
      __ AbsD(out, in);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected abs type " << abs->GetResultType();
  }
}
7013
void LocationsBuilderMIPS64::VisitConstructorFence(HConstructorFence* constructor_fence) {
  // No registers needed; the fence is a bare barrier.
  constructor_fence->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  // A constructor fence is emitted as a store-store barrier.
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}

void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // No registers needed; the barrier is a bare instruction.
  memory_barrier->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  // Emit the barrier kind carried by the HIR node.
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
7030
7031void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01007032 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(ret);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007033 DataType::Type return_type = ret->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07007034 locations->SetInAt(0, Mips64ReturnLocation(return_type));
7035}
7036
7037void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
7038 codegen_->GenerateFrameExit();
7039}
7040
7041void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
7042 ret->SetLocations(nullptr);
7043}
7044
7045void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
7046 codegen_->GenerateFrameExit();
7047}
7048
void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
  // Rotate shares the generic shift location logic.
  HandleShift(ror);
}

void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
  // Rotate shares the generic shift code generation.
  HandleShift(ror);
}

void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
  HandleShift(shr);
}
7072
void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
  // Subtraction shares the generic binary-op location logic.
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
  // Subtraction shares the generic binary-op code generation.
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  // Static field reads share the generic field-get logic.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Static field writes share the generic field-set logic.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // GetValueCanBeNull() lets the write barrier skip the null check when the
  // stored reference is known to be non-null.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
7096
// Unresolved field accesses (the field could not be resolved at compile time)
// are lowered to runtime calls. The four visitor pairs below all follow the
// same pattern: the locations builder sets up the runtime calling convention,
// and the code generator emits the actual runtime call via the codegen helper.

void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}

void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(
      instruction, instruction->GetFieldType(), calling_convention);
}

void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS64 calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
7164
// Location setup for HSuspendCheck: the check itself takes no operands but may
// branch to a slow path that calls into the runtime.
void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}
7174
7175void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
7176 HBasicBlock* block = instruction->GetBlock();
7177 if (block->GetLoopInformation() != nullptr) {
7178 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
7179 // The back edge will generate the suspend check.
7180 return;
7181 }
7182 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
7183 // The goto will generate the suspend check.
7184 return;
7185 }
7186 GenerateSuspendCheck(instruction, nullptr);
7187}
7188
// Location setup for HThrow: the exception object goes in the first runtime
// calling-convention argument register; the throw is an unconditional call
// on the main path.
void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

// Code generation for HThrow: delegates exception delivery to the
// kQuickDeliverException runtime entrypoint.
void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
7200
7201void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007202 DataType::Type input_type = conversion->GetInputType();
7203 DataType::Type result_type = conversion->GetResultType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01007204 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
7205 << input_type << " -> " << result_type;
Alexey Frunze4dda3372015-06-01 18:31:49 -07007206
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007207 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
7208 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07007209 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
7210 }
7211
Vladimir Markoca6fff82017-10-03 14:49:14 +01007212 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(conversion);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007213
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007214 if (DataType::IsFloatingPointType(input_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007215 locations->SetInAt(0, Location::RequiresFpuRegister());
7216 } else {
7217 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07007218 }
7219
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01007220 if (DataType::IsFloatingPointType(result_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007221 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007222 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08007223 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007224 }
7225}
7226
// Emits the MIPS64 instruction sequence for an explicit type conversion.
// Four cases, keyed by the integral/floating-point nature of input and result:
// int<->int narrowing/extension, int->fp, fp->int (truncating), and fp<->fp.
void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();

  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case DataType::Type::kUint8:
        // Zero-extend to 8 bits.
        __ Andi(dst, src, 0xFF);
        break;
      case DataType::Type::kInt8:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          __ Seb(dst, src);
        }
        break;
      case DataType::Type::kUint16:
        // Zero-extend to 16 bits.
        __ Andi(dst, src, 0xFFFF);
        break;
      case DataType::Type::kInt16:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          __ Seh(dst, src);
        }
        break;
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
        // conversions, except when the input and output registers are the same and we are not
        // converting longs to shorter types. In these cases, do nothing.
        if ((input_type == DataType::Type::kInt64) || (dst != src)) {
          __ Sll(dst, src, 0);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
    // Integer -> floating point: move the bits into FTMP, then convert.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    if (input_type == DataType::Type::kInt64) {
      __ Dmtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      __ Mtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
    // Floating point -> integer: truncate into FTMP, then move the bits out.
    CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();

    if (result_type == DataType::Type::kInt64) {
      if (input_type == DataType::Type::kFloat32) {
        __ TruncLS(FTMP, src);
      } else {
        __ TruncLD(FTMP, src);
      }
      __ Dmfc1(dst, FTMP);
    } else {
      if (input_type == DataType::Type::kFloat32) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);
    }
  } else if (DataType::IsFloatingPointType(result_type) &&
             DataType::IsFloatingPointType(input_type)) {
    // float <-> double: a single convert instruction.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    if (result_type == DataType::Type::kFloat32) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
7334
// HUShr (unsigned shift right) goes through the common shift handler.
void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

// HXor goes through the common binary-op handler.
void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
7360
// All comparison instructions — signed (Equal..GreaterThanOrEqual) and
// unsigned (Below..AboveOrEqual) — share a single condition handler for both
// location setup and code emission.

void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
7440
// Simple implementation of packed switch - generate cascaded compare/jumps.
// Location setup for HPackedSwitch: only the switch value needs a register;
// no call, no output.
void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}
7447
// Lowers a packed switch as a cascade of compare/branch pairs. The switch
// value is rebased to [0, num_entries) in TMP and then decremented as the
// cascade walks the successors two cases at a time.
void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
                                                                 int32_t lower_bound,
                                                                 uint32_t num_entries,
                                                                 HBasicBlock* switch_block,
                                                                 HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  // Handle the remaining cases two at a time: subtract 2, then a "< 0" branch
  // covers the first of the pair and a "== 0" branch covers the second.
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
7483
// Lowers a packed switch via a jump table: bounds-check the rebased value,
// load a 32-bit self-relative offset from the table, add the table base, and
// jump indirectly.
void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<Mips64Label*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  __ Addiu32(TMP, value_reg, -lower_bound);
  __ LoadConst32(AT, num_entries);
  __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, table->GetLabel());
  // Dlsa with shift 2 scales the index by 4: each table entry is a 32-bit offset.
  __ Dlsa(TMP, TMP, AT, 2);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Daddu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ Nop();
}
7514
7515void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7516 int32_t lower_bound = switch_instr->GetStartValue();
7517 uint32_t num_entries = switch_instr->GetNumEntries();
7518 LocationSummary* locations = switch_instr->GetLocations();
7519 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
7520 HBasicBlock* switch_block = switch_instr->GetBlock();
7521 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7522
7523 if (num_entries > kPackedSwitchJumpTableThreshold) {
7524 GenTableBasedPackedSwitch(value_reg,
7525 lower_bound,
7526 num_entries,
7527 switch_block,
7528 default_block);
7529 } else {
7530 GenPackedSwitchWithCompares(value_reg,
7531 lower_bound,
7532 num_entries,
7533 switch_block,
7534 default_block);
7535 }
7536}
7537
// Location setup for HClassTableGet: reads a method pointer out of a class
// object, so it needs one input register (the class) and one output register.
void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
7544
// Code generation for HClassTableGet: loads an ArtMethod* from either the
// class's embedded vtable (single load at a fixed offset) or its interface
// method table (load the IMT pointer, then index into it).
void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Vtable entries are embedded in the class object itself.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kMips64PointerSize).SizeValue();
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->InAt(0).AsRegister<GpuRegister>(),
                      method_offset);
  } else {
    // IMT access is indirect: first load the table pointer from the class,
    // then load the entry at the element offset.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kMips64PointerSize));
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->InAt(0).AsRegister<GpuRegister>(),
                      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
    __ LoadFromOffset(kLoadDoubleword,
                      locations->Out().AsRegister<GpuRegister>(),
                      locations->Out().AsRegister<GpuRegister>(),
                      method_offset);
  }
}
7567
// HIntermediateAddress is not produced for MIPS64 (it comes from an
// architecture-specific simplification pass), so reaching these is a bug.
void LocationsBuilderMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                      ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                              ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7577
Alexey Frunze4dda3372015-06-01 18:31:49 -07007578} // namespace mips64
7579} // namespace art