blob: 8a06061c6a41983acb486d0064f852a222712e92 [file] [log] [blame]
Alexey Frunze4dda3372015-06-01 18:31:49 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips64.h"
18
Alexey Frunze4147fcc2017-06-17 19:57:27 -070019#include "arch/mips64/asm_support_mips64.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070020#include "art_method.h"
Vladimir Marko94ec2db2017-09-06 17:21:03 +010021#include "class_table.h"
Alexey Frunzec857c742015-09-23 15:12:39 -070022#include "code_generator_utils.h"
Alexey Frunze19f6c692016-11-30 19:19:55 -080023#include "compiled_method.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070024#include "entrypoints/quick/quick_entrypoints.h"
25#include "entrypoints/quick/quick_entrypoints_enum.h"
26#include "gc/accounting/card_table.h"
Andreas Gampe09659c22017-09-18 18:23:32 -070027#include "heap_poisoning.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070028#include "intrinsics.h"
Chris Larsen3039e382015-08-26 07:54:08 -070029#include "intrinsics_mips64.h"
Vladimir Markod8dbc8d2017-09-20 13:37:47 +010030#include "linker/linker_patch.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070031#include "mirror/array-inl.h"
32#include "mirror/class-inl.h"
33#include "offsets.h"
Vladimir Marko174b2e22017-10-12 13:34:49 +010034#include "stack_map_stream.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070035#include "thread.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070036#include "utils/assembler.h"
Alexey Frunzea0e87b02015-09-24 22:57:20 -070037#include "utils/mips64/assembler_mips64.h"
Alexey Frunze4dda3372015-06-01 18:31:49 -070038#include "utils/stack_checks.h"
39
40namespace art {
41namespace mips64 {
42
// Offset (in bytes) of the current ArtMethod* within the managed frame.
static constexpr int kCurrentMethodStackOffset = 0;
// Register carrying the ArtMethod* on entry to a managed method (first GPR argument).
static constexpr GpuRegister kMethodRegisterArgument = A0;

// Flags controlling the use of thunks for Baker read barriers.
constexpr bool kBakerReadBarrierThunksEnableForFields = true;
constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
50
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010051Location Mips64ReturnLocation(DataType::Type return_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070052 switch (return_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010053 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +010054 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010055 case DataType::Type::kInt8:
56 case DataType::Type::kUint16:
57 case DataType::Type::kInt16:
Aart Bik66c158e2018-01-31 12:55:04 -080058 case DataType::Type::kUint32:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010059 case DataType::Type::kInt32:
60 case DataType::Type::kReference:
Aart Bik66c158e2018-01-31 12:55:04 -080061 case DataType::Type::kUint64:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010062 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070063 return Location::RegisterLocation(V0);
64
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010065 case DataType::Type::kFloat32:
66 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -070067 return Location::FpuRegisterLocation(F0);
68
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010069 case DataType::Type::kVoid:
Alexey Frunze4dda3372015-06-01 18:31:49 -070070 return Location();
71 }
72 UNREACHABLE();
73}
74
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010075Location InvokeDexCallingConventionVisitorMIPS64::GetReturnLocation(DataType::Type type) const {
Alexey Frunze4dda3372015-06-01 18:31:49 -070076 return Mips64ReturnLocation(type);
77}
78
79Location InvokeDexCallingConventionVisitorMIPS64::GetMethodLocation() const {
80 return Location::RegisterLocation(kMethodRegisterArgument);
81}
82
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010083Location InvokeDexCallingConventionVisitorMIPS64::GetNextLocation(DataType::Type type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070084 Location next_location;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010085 if (type == DataType::Type::kVoid) {
Alexey Frunze4dda3372015-06-01 18:31:49 -070086 LOG(FATAL) << "Unexpected parameter type " << type;
87 }
88
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010089 if (DataType::IsFloatingPointType(type) &&
Alexey Frunze4dda3372015-06-01 18:31:49 -070090 (float_index_ < calling_convention.GetNumberOfFpuRegisters())) {
91 next_location = Location::FpuRegisterLocation(
92 calling_convention.GetFpuRegisterAt(float_index_++));
93 gp_index_++;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +010094 } else if (!DataType::IsFloatingPointType(type) &&
Alexey Frunze4dda3372015-06-01 18:31:49 -070095 (gp_index_ < calling_convention.GetNumberOfRegisters())) {
96 next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index_++));
97 float_index_++;
98 } else {
99 size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100100 next_location = DataType::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
101 : Location::StackSlot(stack_offset);
Alexey Frunze4dda3372015-06-01 18:31:49 -0700102 }
103
104 // Space on the stack is reserved for all arguments.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100105 stack_index_ += DataType::Is64BitType(type) ? 2 : 1;
Alexey Frunze4dda3372015-06-01 18:31:49 -0700106
Alexey Frunze4dda3372015-06-01 18:31:49 -0700107 return next_location;
108}
109
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100110Location InvokeRuntimeCallingConvention::GetReturnLocation(DataType::Type type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -0700111 return Mips64ReturnLocation(type);
112}
113
// Convenience macro used by the slow paths below: routes `__ Foo()` to the
// MIPS64 assembler owned by the current code generator.
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->  // NOLINT
// Byte offset of a quick runtime entrypoint within the Thread object, for MIPS64.
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700117
// Slow path for HBoundsCheck: moves the index and length into the runtime
// argument registers and calls the ThrowArrayBounds / ThrowStringBounds
// entrypoint. The throw never returns, hence IsFatal().
class BoundsCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit BoundsCheckSlowPathMIPS64(HBoundsCheck* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kInt32,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kInt32);
    // String.charAt uses a dedicated entrypoint so the exception message is right.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  // The entrypoint throws; control never comes back to the fast path.
  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS64);
};
154
// Slow path for HDivZeroCheck: calls the ThrowDivZero entrypoint, which
// throws ArithmeticException and never returns (hence IsFatal()).
class DivZeroCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DivZeroCheckSlowPathMIPS64(HDivZeroCheck* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    mips64_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS64);
};
174
// Slow path for HLoadClass / HClinitCheck: calls the InitializeType or
// InitializeStaticStorage entrypoint to resolve (and optionally initialize)
// the class, then moves the result into the instruction's output location.
class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  LoadClassSlowPathMIPS64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
                          bool do_clinit)
      : SlowPathCodeMIPS64(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit) {
    // `at` is either the load itself or the clinit check that triggered us.
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    InvokeRuntimeCallingConvention calling_convention;
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the type index as the single runtime-call argument.
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      DataType::Type type = instruction_->GetType();
      mips64_codegen->MoveLocation(out,
                                   Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                   type);
    }
    RestoreLiveRegisters(codegen, locations);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS64"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS64);
};
235
// Slow path for HLoadString with the kBssEntry load kind: calls the
// ResolveString entrypoint with the string index and moves the resolved
// String reference into the instruction's output register.
class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit LoadStringSlowPathMIPS64(HLoadString* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // Pass the string index as the single runtime-call argument.
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips64_codegen->InvokeRuntime(kQuickResolveString,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // Copy the runtime's return value into the expected output location.
    DataType::Type type = instruction_->GetType();
    mips64_codegen->MoveLocation(locations->Out(),
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    RestoreLiveRegisters(codegen, locations);

    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS64);
};
273
// Slow path for HNullCheck: calls the ThrowNullPointer entrypoint, which
// throws NullPointerException and never returns (hence IsFatal()).
class NullCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit NullCheckSlowPathMIPS64(HNullCheck* instr) : SlowPathCodeMIPS64(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    mips64_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS64);
};
299
// Slow path for HSuspendCheck: calls the TestSuspend entrypoint and then
// branches back either to the return label (loop-header style check) or to
// the given successor block.
class SuspendCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  SuspendCheckSlowPathMIPS64(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS64(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live vector registers for SIMD.
    mips64_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live vector registers for SIMD.
    if (successor_ == nullptr) {
      __ Bc(GetReturnLabel());
    } else {
      __ Bc(mips64_codegen->GetLabelOf(successor_));
    }
  }

  // Label the fast path branches back to; only valid when there is no successor.
  Mips64Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS64"; }

  HBasicBlock* GetSuccessor() const {
    return successor_;
  }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Mips64Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS64);
};
340
// Slow path shared by HInstanceOf and HCheckCast: moves the object and the
// class into the runtime argument registers and calls the corresponding
// entrypoint. For InstanceOf the boolean result is moved to the output
// location; for CheckCast a failed check throws. When `is_fatal_` the check
// cannot fall through, so registers are not restored and no exit branch is
// emitted.
class TypeCheckSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit TypeCheckSlowPathMIPS64(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS64(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();

    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_ || instruction_->CanThrowIntoCatchBlock()) {
      // Live registers are needed again on the fall-through path, or will be
      // restored by the catch block if the exception is caught.
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               DataType::Type::kReference,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               DataType::Type::kReference);
    if (instruction_->IsInstanceOf()) {
      mips64_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      DataType::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips64_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips64_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ Bc(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS64"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS64);
};
395
// Slow path for HDeoptimize: passes the deoptimization kind to the
// Deoptimize entrypoint, which transfers execution to the interpreter
// and does not return here.
class DeoptimizationSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit DeoptimizationSlowPathMIPS64(HDeoptimize* instruction)
      : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    // The deoptimization kind is the single runtime-call argument.
    __ LoadConst32(calling_convention.GetRegisterAt(0),
                   static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    mips64_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS64);
};
418
// Slow path for HArraySet of an object element when the type check cannot be
// statically elided: moves (array, index, value) into the runtime argument
// registers with a parallel move (arguments may overlap) and calls the
// AputObject entrypoint, which performs the store-check and the store.
class ArraySetSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  explicit ArraySetSlowPathMIPS64(HInstruction* instruction) : SlowPathCodeMIPS64(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        DataType::Type::kReference,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        DataType::Type::kInt32,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        DataType::Type::kReference,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    mips64_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS64"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS64);
};
459
460// Slow path marking an object reference `ref` during a read
461// barrier. The field `obj.field` in the object `obj` holding this
462// reference does not get updated by this slow path after marking (see
463// ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 below for that).
464//
465// This means that after the execution of this slow path, `ref` will
466// always be up-to-date, but `obj.field` may not; i.e., after the
467// flip, `ref` will be a to-space reference, but `obj.field` will
468// probably still be a from-space reference (unless it gets updated by
469// another thread, or if another thread installed another object
470// reference (different from `ref`) in `obj.field`).
471//
472// If `entrypoint` is a valid location it is assumed to already be
473// holding the entrypoint. The case where the entrypoint is passed in
474// is for the GcRoot read barrier.
475class ReadBarrierMarkSlowPathMIPS64 : public SlowPathCodeMIPS64 {
476 public:
477 ReadBarrierMarkSlowPathMIPS64(HInstruction* instruction,
478 Location ref,
479 Location entrypoint = Location::NoLocation())
480 : SlowPathCodeMIPS64(instruction), ref_(ref), entrypoint_(entrypoint) {
481 DCHECK(kEmitCompilerReadBarrier);
482 }
483
484 const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }
485
486 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
487 LocationSummary* locations = instruction_->GetLocations();
488 GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
489 DCHECK(locations->CanCall());
490 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
491 DCHECK(instruction_->IsInstanceFieldGet() ||
492 instruction_->IsStaticFieldGet() ||
493 instruction_->IsArrayGet() ||
494 instruction_->IsArraySet() ||
495 instruction_->IsLoadClass() ||
496 instruction_->IsLoadString() ||
497 instruction_->IsInstanceOf() ||
498 instruction_->IsCheckCast() ||
499 (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
500 (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
501 << "Unexpected instruction in read barrier marking slow path: "
502 << instruction_->DebugName();
503
504 __ Bind(GetEntryLabel());
505 // No need to save live registers; it's taken care of by the
506 // entrypoint. Also, there is no need to update the stack mask,
507 // as this runtime call will not trigger a garbage collection.
508 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
509 DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
510 (S2 <= ref_reg && ref_reg <= S7) ||
511 (ref_reg == S8)) << ref_reg;
512 // "Compact" slow path, saving two moves.
513 //
514 // Instead of using the standard runtime calling convention (input
515 // and output in A0 and V0 respectively):
516 //
517 // A0 <- ref
518 // V0 <- ReadBarrierMark(A0)
519 // ref <- V0
520 //
521 // we just use rX (the register containing `ref`) as input and output
522 // of a dedicated entrypoint:
523 //
524 // rX <- ReadBarrierMarkRegX(rX)
525 //
526 if (entrypoint_.IsValid()) {
527 mips64_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
528 DCHECK_EQ(entrypoint_.AsRegister<GpuRegister>(), T9);
529 __ Jalr(entrypoint_.AsRegister<GpuRegister>());
530 __ Nop();
531 } else {
532 int32_t entry_point_offset =
Roland Levillain97c46462017-05-11 14:04:03 +0100533 Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
Alexey Frunze15958152017-02-09 19:08:30 -0800534 // This runtime call does not require a stack map.
535 mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
536 instruction_,
537 this);
538 }
539 __ Bc(GetExitLabel());
540 }
541
542 private:
543 // The location (register) of the marked object reference.
544 const Location ref_;
545
546 // The location of the entrypoint if already loaded.
547 const Location entrypoint_;
548
549 DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS64);
550};
551
// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathMIPS64 above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  // `temp1` preserves the old (pre-marking) value of `ref` across the call
  // to the mark entrypoint; it must not be AT or TMP, which are clobbered
  // by the CAS sequence emitted in EmitNativeCode() below.
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(HInstruction* instruction,
                                              Location ref,
                                              GpuRegister obj,
                                              Location field_offset,
                                              GpuRegister temp1)
      : SlowPathCodeMIPS64(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS64";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    GpuRegister ref_reg = ref_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegister()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T2) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == S8)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    // The `- 1` below maps ref_reg to its entrypoint slot (the table has no
    // entry for ZERO, so indices start at register 1).
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips64_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    Mips64Label done;
    __ Beqc(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if
    // mutator updates before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    GpuRegister base = obj_;
    GpuRegister offset = field_offset_.AsRegister<GpuRegister>();
    GpuRegister expected = temp1_;
    GpuRegister value = ref_reg;
    GpuRegister tmp_ptr = TMP;      // Pointer to actual memory.
    GpuRegister tmp = AT;           // Value in memory.

    __ Daddu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    Mips64Label loop_head, exit_loop;
    __ Bind(&loop_head);
    __ Ll(tmp, tmp_ptr);
    // The LL instruction sign-extends the 32-bit value, but
    // 32-bit references must be zero-extended. Zero-extend `tmp`.
    __ Dext(tmp, tmp, 0, 32);
    __ Bnec(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    __ Sc(tmp, tmp_ptr);
    // SC writes 1 to `tmp` on success, 0 on failure; retry on failure.
    __ Beqzc(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ Bc(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const GpuRegister obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  // Temporary register preserving the old value of `ref_` across the mark call.
  const GpuRegister temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS64);
};
708
// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS64 : public SlowPathCodeMIPS64 {
 public:
  ReadBarrierForHeapReferenceSlowPathMIPS64(HInstruction* instruction,
                                            Location out,
                                            Location ref,
                                            Location obj,
                                            uint32_t offset,
                                            Location index)
      : SlowPathCodeMIPS64(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DataType::Type type = DataType::Type::kReference;
    GpuRegister reg_out = out_.AsRegister<GpuRegister>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        GpuRegister index_reg = index_.AsRegister<GpuRegister>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips64::Mips64Assembler::Sll and
          // art::mips64::Mips64Assembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          GpuRegister free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegister());
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetAllocator());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          DataType::Type::kReference,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          DataType::Type::kReference,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            DataType::Type::kInt32,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      // Constant offset: load it directly into the third argument register.
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips64_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                  instruction_,
                                  instruction_->GetDexPc(),
                                  this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);

    RestoreLiveRegisters(codegen, locations);
    __ Bc(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierForHeapReferenceSlowPathMIPS64";
  }

 private:
  // Returns a caller-save core register different from `ref_` and `obj_`
  // that is neither callee-save nor blocked; used to preserve the array
  // index when it lives in a callee-save register.
  GpuRegister FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<GpuRegister>());
    size_t obj = static_cast<int>(obj_.AsRegister<GpuRegister>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<GpuRegister>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS64
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // The location the read barrier result is moved to (the runtime's return value).
  const Location out_;
  // The location of the reference loaded from the heap.
  const Location ref_;
  // The location of the object the reference was loaded from.
  const Location obj_;
  // Byte offset of the reference field within `obj_` (0 when `index_` is used as an offset).
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS64);
};
894
895// Slow path generating a read barrier for a GC root.
896class ReadBarrierForRootSlowPathMIPS64 : public SlowPathCodeMIPS64 {
897 public:
898 ReadBarrierForRootSlowPathMIPS64(HInstruction* instruction, Location out, Location root)
899 : SlowPathCodeMIPS64(instruction), out_(out), root_(root) {
900 DCHECK(kEmitCompilerReadBarrier);
901 }
902
903 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
904 LocationSummary* locations = instruction_->GetLocations();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100905 DataType::Type type = DataType::Type::kReference;
Alexey Frunze15958152017-02-09 19:08:30 -0800906 GpuRegister reg_out = out_.AsRegister<GpuRegister>();
907 DCHECK(locations->CanCall());
908 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
909 DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
910 << "Unexpected instruction in read barrier for GC root slow path: "
911 << instruction_->DebugName();
912
913 __ Bind(GetEntryLabel());
914 SaveLiveRegisters(codegen, locations);
915
916 InvokeRuntimeCallingConvention calling_convention;
917 CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
918 mips64_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
919 root_,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +0100920 DataType::Type::kReference);
Alexey Frunze15958152017-02-09 19:08:30 -0800921 mips64_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
922 instruction_,
923 instruction_->GetDexPc(),
924 this);
925 CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
926 mips64_codegen->MoveLocation(out_, calling_convention.GetReturnLocation(type), type);
927
928 RestoreLiveRegisters(codegen, locations);
929 __ Bc(GetExitLabel());
930 }
931
932 const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS64"; }
933
934 private:
935 const Location out_;
936 const Location root_;
937
938 DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS64);
939};
940
// Constructs the MIPS64 code generator: registers the architecture's
// callee-save sets with the base CodeGenerator and sets up the arena-backed
// builders, resolver, assembler, literal maps and linker-patch containers.
CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
                                         const Mips64InstructionSetFeatures& isa_features,
                                         const CompilerOptions& compiler_options,
                                         OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfGpuRegisters,
                    kNumberOfFpuRegisters,
                    /* number_of_register_pairs */ 0,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetAllocator(), this),
      assembler_(graph->GetAllocator(), &isa_features),
      isa_features_(isa_features),
      // Deduplication maps for 32-/64-bit literals.
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      uint64_literals_(std::less<uint64_t>(),
                       graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      // PC-relative and .bss-entry linker patch lists for methods, types and strings.
      pc_relative_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      // JIT roots for strings and classes.
      jit_string_patches_(StringReferenceValueComparator(),
                          graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(TypeReferenceValueComparator(),
                         graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
978
979#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100980// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
981#define __ down_cast<Mips64Assembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700982#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMips64PointerSize, x).Int32Value()
Alexey Frunze4dda3372015-06-01 18:31:49 -0700983
984void CodeGeneratorMIPS64::Finalize(CodeAllocator* allocator) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700985 // Ensure that we fix up branches.
986 __ FinalizeCode();
987
988 // Adjust native pc offsets in stack maps.
Vladimir Marko174b2e22017-10-12 13:34:49 +0100989 StackMapStream* stack_map_stream = GetStackMapStream();
990 for (size_t i = 0, num = stack_map_stream->GetNumberOfStackMaps(); i != num; ++i) {
Mathieu Chartiera2f526f2017-01-19 14:48:48 -0800991 uint32_t old_position =
Vladimir Marko33bff252017-11-01 14:35:42 +0000992 stack_map_stream->GetStackMap(i).native_pc_code_offset.Uint32Value(InstructionSet::kMips64);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700993 uint32_t new_position = __ GetAdjustedPosition(old_position);
994 DCHECK_GE(new_position, old_position);
Vladimir Marko174b2e22017-10-12 13:34:49 +0100995 stack_map_stream->SetStackMapNativePcOffset(i, new_position);
Alexey Frunzea0e87b02015-09-24 22:57:20 -0700996 }
997
998 // Adjust pc offsets for the disassembly information.
999 if (disasm_info_ != nullptr) {
1000 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
1001 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
1002 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
1003 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
1004 it.second.start = __ GetAdjustedPosition(it.second.start);
1005 it.second.end = __ GetAdjustedPosition(it.second.end);
1006 }
1007 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
1008 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
1009 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
1010 }
1011 }
1012
Alexey Frunze4dda3372015-06-01 18:31:49 -07001013 CodeGenerator::Finalize(allocator);
1014}
1015
// Parallel moves are emitted through the owning code generator's assembler.
Mips64Assembler* ParallelMoveResolverMIPS64::GetAssembler() const {
  return codegen_->GetAssembler();
}
1019
// Emits the `index`-th pending move, delegating to the code generator,
// which knows how to move between any pair of locations.
void ParallelMoveResolverMIPS64::EmitMove(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
}
1024
// Emits the `index`-th pending move as a swap of its two locations
// (used to break cycles in the parallel move graph).
void ParallelMoveResolverMIPS64::EmitSwap(size_t index) {
  MoveOperands* move = moves_[index];
  codegen_->SwapLocations(move->GetDestination(), move->GetSource(), move->GetType());
}
1029
// Reloads the scratch register from the stack slot reserved by
// SpillScratch() and releases that slot.
void ParallelMoveResolverMIPS64::RestoreScratch(int reg) {
  // Pop reg
  __ Ld(GpuRegister(reg), SP, 0);
  __ DecreaseFrameSize(kMips64DoublewordSize);
}
1035
// Reserves a doubleword on the stack and saves `reg` into it so the
// register can be used as a scratch; undone by RestoreScratch().
void ParallelMoveResolverMIPS64::SpillScratch(int reg) {
  // Push reg
  __ IncreaseFrameSize(kMips64DoublewordSize);
  __ Sd(GpuRegister(reg), SP, 0);
}
1041
1042void ParallelMoveResolverMIPS64::Exchange(int index1, int index2, bool double_slot) {
1043 LoadOperandType load_type = double_slot ? kLoadDoubleword : kLoadWord;
1044 StoreOperandType store_type = double_slot ? kStoreDoubleword : kStoreWord;
1045 // Allocate a scratch register other than TMP, if available.
1046 // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
1047 // automatically unspilled when the scratch scope object is destroyed).
1048 ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
1049 // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
Lazar Trsicd9672662015-09-03 17:33:01 +02001050 int stack_offset = ensure_scratch.IsSpilled() ? kMips64DoublewordSize : 0;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001051 __ LoadFromOffset(load_type,
1052 GpuRegister(ensure_scratch.GetRegister()),
1053 SP,
1054 index1 + stack_offset);
1055 __ LoadFromOffset(load_type,
1056 TMP,
1057 SP,
1058 index2 + stack_offset);
1059 __ StoreToOffset(store_type,
1060 GpuRegister(ensure_scratch.GetRegister()),
1061 SP,
1062 index2 + stack_offset);
1063 __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset);
1064}
1065
// Swaps the contents of two 128-bit (quadword/SIMD) stack slots using the
// two FPU temporaries FTMP and FTMP2.
void ParallelMoveResolverMIPS64::ExchangeQuadSlots(int index1, int index2) {
  __ LoadFpuFromOffset(kLoadQuadword, FTMP, SP, index1);
  __ LoadFpuFromOffset(kLoadQuadword, FTMP2, SP, index2);
  __ StoreFpuToOffset(kStoreQuadword, FTMP, SP, index2);
  __ StoreFpuToOffset(kStoreQuadword, FTMP2, SP, index1);
}
1072
// Maps a MIPS64 core register to its DWARF register number for CFI records.
static dwarf::Reg DWARFReg(GpuRegister reg) {
  return dwarf::Reg::Mips64Core(static_cast<int>(reg));
}
1076
// Maps a MIPS64 FPU register to its DWARF register number for CFI records.
static dwarf::Reg DWARFReg(FpuRegister reg) {
  return dwarf::Reg::Mips64Fp(static_cast<int>(reg));
}
Alexey Frunze4dda3372015-06-01 18:31:49 -07001080
// Emits the method prologue: optional stack-overflow probe, frame
// allocation, callee-save spills (with matching CFI records), saving the
// current ArtMethod*, and clearing the should_deoptimize flag.
void CodeGeneratorMIPS64::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  if (GetCompilerOptions().CountHotnessInCompiledCode()) {
    LOG(WARNING) << "Unimplemented hotness update in mips64 backend";
  }

  bool do_overflow_check =
      FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kMips64) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe the lowest address the frame may touch; a fault here raises
    // StackOverflowError at this method's entry pc.
    __ LoadFromOffset(
        kLoadWord,
        ZERO,
        SP,
        -static_cast<int32_t>(GetStackOverflowReservedBytes(InstructionSet::kMips64)));
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(InstructionSet::kMips64)) {
    LOG(FATAL) << "Stack frame larger than "
        << GetStackOverflowReservedBytes(InstructionSet::kMips64) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Core callee-saves go at the top of the frame, highest-numbered first.
  for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
    GpuRegister reg = kCoreCalleeSaves[i];
    if (allocated_registers_.ContainsCoreRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // FPU callee-saves follow below the core ones.
  for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
    FpuRegister reg = kFpuCalleeSaves[i];
    if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
      ofs -= kMips64DoublewordSize;
      __ StoreFpuToOffset(kStoreDoubleword, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should_deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1145
// Emits the method epilogue: restores callee-saved registers (mirroring
// GenerateFrameEntry), releases the frame and returns via RA. CFI state is
// remembered/restored so code emitted after the return keeps correct
// unwind info.
void CodeGeneratorMIPS64::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (int i = arraysize(kCoreCalleeSaves) - 1; i >= 0; --i) {
      GpuRegister reg = kCoreCalleeSaves[i];
      if (allocated_registers_.ContainsCoreRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (int i = arraysize(kFpuCalleeSaves) - 1; i >= 0; --i) {
      FpuRegister reg = kFpuCalleeSaves[i];
      if (allocated_registers_.ContainsFloatingPointRegister(reg)) {
        ofs -= kMips64DoublewordSize;
        __ LoadFpuFromOffset(kLoadDoubleword, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    __ DecreaseFrameSize(GetFrameSize());
  }

  // Return: jump to the address in RA.
  __ Jic(RA, 0);

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1180
// Binds the basic block's label to the current assembler position so
// branches targeting this block can be resolved.
void CodeGeneratorMIPS64::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
1184
1185void CodeGeneratorMIPS64::MoveLocation(Location destination,
1186 Location source,
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001187 DataType::Type dst_type) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001188 if (source.Equals(destination)) {
1189 return;
1190 }
1191
1192 // A valid move can always be inferred from the destination and source
1193 // locations. When moving from and to a register, the argument type can be
1194 // used to generate 32bit instead of 64bit moves.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001195 bool unspecified_type = (dst_type == DataType::Type::kVoid);
Alexey Frunze4dda3372015-06-01 18:31:49 -07001196 DCHECK_EQ(unspecified_type, false);
1197
1198 if (destination.IsRegister() || destination.IsFpuRegister()) {
1199 if (unspecified_type) {
1200 HConstant* src_cst = source.IsConstant() ? source.GetConstant() : nullptr;
1201 if (source.IsStackSlot() ||
1202 (src_cst != nullptr && (src_cst->IsIntConstant()
1203 || src_cst->IsFloatConstant()
1204 || src_cst->IsNullConstant()))) {
1205 // For stack slots and 32bit constants, a 64bit type is appropriate.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001206 dst_type = destination.IsRegister() ? DataType::Type::kInt32 : DataType::Type::kFloat32;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001207 } else {
1208 // If the source is a double stack slot or a 64bit constant, a 64bit
1209 // type is appropriate. Else the source is a register, and since the
1210 // type has not been specified, we chose a 64bit type to force a 64bit
1211 // move.
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001212 dst_type = destination.IsRegister() ? DataType::Type::kInt64 : DataType::Type::kFloat64;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001213 }
1214 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001215 DCHECK((destination.IsFpuRegister() && DataType::IsFloatingPointType(dst_type)) ||
1216 (destination.IsRegister() && !DataType::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001217 if (source.IsStackSlot() || source.IsDoubleStackSlot()) {
1218 // Move to GPR/FPR from stack
1219 LoadOperandType load_type = source.IsStackSlot() ? kLoadWord : kLoadDoubleword;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001220 if (DataType::IsFloatingPointType(dst_type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001221 __ LoadFpuFromOffset(load_type,
1222 destination.AsFpuRegister<FpuRegister>(),
1223 SP,
1224 source.GetStackIndex());
1225 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001226 // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
Alexey Frunze4dda3372015-06-01 18:31:49 -07001227 __ LoadFromOffset(load_type,
1228 destination.AsRegister<GpuRegister>(),
1229 SP,
1230 source.GetStackIndex());
1231 }
Lena Djokicca8c2952017-05-29 11:31:46 +02001232 } else if (source.IsSIMDStackSlot()) {
1233 __ LoadFpuFromOffset(kLoadQuadword,
1234 destination.AsFpuRegister<FpuRegister>(),
1235 SP,
1236 source.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001237 } else if (source.IsConstant()) {
1238 // Move to GPR/FPR from constant
1239 GpuRegister gpr = AT;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001240 if (!DataType::IsFloatingPointType(dst_type)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001241 gpr = destination.AsRegister<GpuRegister>();
1242 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001243 if (dst_type == DataType::Type::kInt32 || dst_type == DataType::Type::kFloat32) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001244 int32_t value = GetInt32ValueOf(source.GetConstant()->AsConstant());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001245 if (DataType::IsFloatingPointType(dst_type) && value == 0) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001246 gpr = ZERO;
1247 } else {
1248 __ LoadConst32(gpr, value);
1249 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001250 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001251 int64_t value = GetInt64ValueOf(source.GetConstant()->AsConstant());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001252 if (DataType::IsFloatingPointType(dst_type) && value == 0) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001253 gpr = ZERO;
1254 } else {
1255 __ LoadConst64(gpr, value);
1256 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001257 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001258 if (dst_type == DataType::Type::kFloat32) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001259 __ Mtc1(gpr, destination.AsFpuRegister<FpuRegister>());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001260 } else if (dst_type == DataType::Type::kFloat64) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001261 __ Dmtc1(gpr, destination.AsFpuRegister<FpuRegister>());
1262 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001263 } else if (source.IsRegister()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001264 if (destination.IsRegister()) {
1265 // Move to GPR from GPR
1266 __ Move(destination.AsRegister<GpuRegister>(), source.AsRegister<GpuRegister>());
1267 } else {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001268 DCHECK(destination.IsFpuRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001269 if (DataType::Is64BitType(dst_type)) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001270 __ Dmtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
1271 } else {
1272 __ Mtc1(source.AsRegister<GpuRegister>(), destination.AsFpuRegister<FpuRegister>());
1273 }
1274 }
1275 } else if (source.IsFpuRegister()) {
1276 if (destination.IsFpuRegister()) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001277 if (GetGraph()->HasSIMD()) {
1278 __ MoveV(VectorRegisterFrom(destination),
1279 VectorRegisterFrom(source));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001280 } else {
Lena Djokicca8c2952017-05-29 11:31:46 +02001281 // Move to FPR from FPR
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001282 if (dst_type == DataType::Type::kFloat32) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001283 __ MovS(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1284 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001285 DCHECK_EQ(dst_type, DataType::Type::kFloat64);
Lena Djokicca8c2952017-05-29 11:31:46 +02001286 __ MovD(destination.AsFpuRegister<FpuRegister>(), source.AsFpuRegister<FpuRegister>());
1287 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001288 }
Calin Juravlee460d1d2015-09-29 04:52:17 +01001289 } else {
1290 DCHECK(destination.IsRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001291 if (DataType::Is64BitType(dst_type)) {
Calin Juravlee460d1d2015-09-29 04:52:17 +01001292 __ Dmfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1293 } else {
1294 __ Mfc1(destination.AsRegister<GpuRegister>(), source.AsFpuRegister<FpuRegister>());
1295 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001296 }
1297 }
Lena Djokicca8c2952017-05-29 11:31:46 +02001298 } else if (destination.IsSIMDStackSlot()) {
1299 if (source.IsFpuRegister()) {
1300 __ StoreFpuToOffset(kStoreQuadword,
1301 source.AsFpuRegister<FpuRegister>(),
1302 SP,
1303 destination.GetStackIndex());
1304 } else {
1305 DCHECK(source.IsSIMDStackSlot());
1306 __ LoadFpuFromOffset(kLoadQuadword,
1307 FTMP,
1308 SP,
1309 source.GetStackIndex());
1310 __ StoreFpuToOffset(kStoreQuadword,
1311 FTMP,
1312 SP,
1313 destination.GetStackIndex());
1314 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001315 } else { // The destination is not a register. It must be a stack slot.
1316 DCHECK(destination.IsStackSlot() || destination.IsDoubleStackSlot());
1317 if (source.IsRegister() || source.IsFpuRegister()) {
1318 if (unspecified_type) {
1319 if (source.IsRegister()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001320 dst_type = destination.IsStackSlot() ? DataType::Type::kInt32 : DataType::Type::kInt64;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001321 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001322 dst_type =
1323 destination.IsStackSlot() ? DataType::Type::kFloat32 : DataType::Type::kFloat64;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001324 }
1325 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01001326 DCHECK((destination.IsDoubleStackSlot() == DataType::Is64BitType(dst_type)) &&
1327 (source.IsFpuRegister() == DataType::IsFloatingPointType(dst_type)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07001328 // Move to stack from GPR/FPR
1329 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
1330 if (source.IsRegister()) {
1331 __ StoreToOffset(store_type,
1332 source.AsRegister<GpuRegister>(),
1333 SP,
1334 destination.GetStackIndex());
1335 } else {
1336 __ StoreFpuToOffset(store_type,
1337 source.AsFpuRegister<FpuRegister>(),
1338 SP,
1339 destination.GetStackIndex());
1340 }
1341 } else if (source.IsConstant()) {
1342 // Move to stack from constant
1343 HConstant* src_cst = source.GetConstant();
1344 StoreOperandType store_type = destination.IsStackSlot() ? kStoreWord : kStoreDoubleword;
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001345 GpuRegister gpr = ZERO;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001346 if (destination.IsStackSlot()) {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001347 int32_t value = GetInt32ValueOf(src_cst->AsConstant());
1348 if (value != 0) {
1349 gpr = TMP;
1350 __ LoadConst32(gpr, value);
1351 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001352 } else {
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001353 DCHECK(destination.IsDoubleStackSlot());
1354 int64_t value = GetInt64ValueOf(src_cst->AsConstant());
1355 if (value != 0) {
1356 gpr = TMP;
1357 __ LoadConst64(gpr, value);
1358 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001359 }
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07001360 __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex());
Alexey Frunze4dda3372015-06-01 18:31:49 -07001361 } else {
1362 DCHECK(source.IsStackSlot() || source.IsDoubleStackSlot());
1363 DCHECK_EQ(source.IsDoubleStackSlot(), destination.IsDoubleStackSlot());
1364 // Move to stack from stack
1365 if (destination.IsStackSlot()) {
1366 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1367 __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
1368 } else {
1369 __ LoadFromOffset(kLoadDoubleword, TMP, SP, source.GetStackIndex());
1370 __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex());
1371 }
1372 }
1373 }
1374}
1375
// Swaps the contents of `loc1` and `loc2`. Supported pairings: two GPRs,
// two FPRs (scalar or, with SIMD, full vector registers), two stack slots of
// the same width, two SIMD stack slots, a register and a stack slot, and an
// FPR with a SIMD stack slot. `type` selects 32- vs 64-bit moves for the
// scalar FPR<->FPR case. TMP/FTMP are used as scratch registers; constants
// cannot be swapped.
void CodeGeneratorMIPS64::SwapLocations(Location loc1, Location loc2, DataType::Type type) {
  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  // Nothing to do when both locations are the same.
  if (loc1.Equals(loc2)) {
    return;
  }

  bool is_slot1 = loc1.IsStackSlot() || loc1.IsDoubleStackSlot();
  bool is_slot2 = loc2.IsStackSlot() || loc2.IsDoubleStackSlot();
  bool is_simd1 = loc1.IsSIMDStackSlot();
  bool is_simd2 = loc2.IsSIMDStackSlot();
  bool is_fp_reg1 = loc1.IsFpuRegister();
  bool is_fp_reg2 = loc2.IsFpuRegister();

  if (loc2.IsRegister() && loc1.IsRegister()) {
    // Swap 2 GPRs
    GpuRegister r1 = loc1.AsRegister<GpuRegister>();
    GpuRegister r2 = loc2.AsRegister<GpuRegister>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (is_fp_reg2 && is_fp_reg1) {
    // Swap 2 FPRs
    if (GetGraph()->HasSIMD()) {
      // With SIMD present the full vector register must be preserved,
      // regardless of the scalar `type`.
      __ MoveV(static_cast<VectorRegister>(FTMP), VectorRegisterFrom(loc1));
      __ MoveV(VectorRegisterFrom(loc1), VectorRegisterFrom(loc2));
      __ MoveV(VectorRegisterFrom(loc2), static_cast<VectorRegister>(FTMP));
    } else {
      FpuRegister r1 = loc1.AsFpuRegister<FpuRegister>();
      FpuRegister r2 = loc2.AsFpuRegister<FpuRegister>();
      if (type == DataType::Type::kFloat32) {
        __ MovS(FTMP, r1);
        __ MovS(r1, r2);
        __ MovS(r2, FTMP);
      } else {
        DCHECK_EQ(type, DataType::Type::kFloat64);
        __ MovD(FTMP, r1);
        __ MovD(r1, r2);
        __ MovD(r2, FTMP);
      }
    }
  } else if (is_slot1 != is_slot2) {
    // Swap GPR/FPR and stack slot
    Location reg_loc = is_slot1 ? loc2 : loc1;
    Location mem_loc = is_slot1 ? loc1 : loc2;
    LoadOperandType load_type = mem_loc.IsStackSlot() ? kLoadWord : kLoadDoubleword;
    StoreOperandType store_type = mem_loc.IsStackSlot() ? kStoreWord : kStoreDoubleword;
    // TODO: use load_type = kLoadUnsignedWord when type == DataType::Type::kReference.
    // Read the memory operand first, then overwrite it with the register,
    // then deposit the saved memory value into the register.
    __ LoadFromOffset(load_type, TMP, SP, mem_loc.GetStackIndex());
    if (reg_loc.IsFpuRegister()) {
      __ StoreFpuToOffset(store_type,
                          reg_loc.AsFpuRegister<FpuRegister>(),
                          SP,
                          mem_loc.GetStackIndex());
      if (mem_loc.IsStackSlot()) {
        __ Mtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      } else {
        DCHECK(mem_loc.IsDoubleStackSlot());
        __ Dmtc1(TMP, reg_loc.AsFpuRegister<FpuRegister>());
      }
    } else {
      __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex());
      __ Move(reg_loc.AsRegister<GpuRegister>(), TMP);
    }
  } else if (is_slot1 && is_slot2) {
    // Stack slot <-> stack slot swaps are delegated to the move resolver.
    move_resolver_.Exchange(loc1.GetStackIndex(),
                            loc2.GetStackIndex(),
                            loc1.IsDoubleStackSlot());
  } else if (is_simd1 && is_simd2) {
    move_resolver_.ExchangeQuadSlots(loc1.GetStackIndex(), loc2.GetStackIndex());
  } else if ((is_fp_reg1 && is_simd2) || (is_fp_reg2 && is_simd1)) {
    // Swap FPR and SIMD stack slot, going through FTMP.
    Location fp_reg_loc = is_fp_reg1 ? loc1 : loc2;
    Location mem_loc = is_fp_reg1 ? loc2 : loc1;
    __ LoadFpuFromOffset(kLoadQuadword, FTMP, SP, mem_loc.GetStackIndex());
    __ StoreFpuToOffset(kStoreQuadword,
                        fp_reg_loc.AsFpuRegister<FpuRegister>(),
                        SP,
                        mem_loc.GetStackIndex());
    __ MoveV(VectorRegisterFrom(fp_reg_loc), static_cast<VectorRegister>(FTMP));
  } else {
    LOG(FATAL) << "Unimplemented swap between locations " << loc1 << " and " << loc2;
  }
}
1460
// Materializes the 32-bit immediate `value` into the GPR designated by
// `location`. `location` must be a core register.
void CodeGeneratorMIPS64::MoveConstant(Location location, int32_t value) {
  DCHECK(location.IsRegister());
  __ LoadConst32(location.AsRegister<GpuRegister>(), value);
}
1465
Calin Juravlee460d1d2015-09-29 04:52:17 +01001466void CodeGeneratorMIPS64::AddLocationAsTemp(Location location, LocationSummary* locations) {
1467 if (location.IsRegister()) {
1468 locations->AddTemp(location);
1469 } else {
1470 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1471 }
1472}
1473
// Emits a GC write-barrier: dirties the card-table entry covering `object`
// after a reference `value` has been stored into it. When `value_can_be_null`
// is true the card marking is skipped for a null `value`.
void CodeGeneratorMIPS64::MarkGCCard(GpuRegister object,
                                     GpuRegister value,
                                     bool value_can_be_null) {
  Mips64Label done;
  GpuRegister card = AT;
  GpuRegister temp = TMP;
  if (value_can_be_null) {
    // No card update needed when storing null.
    __ Beqzc(value, &done);
  }
  // Load the card-table biased base from the thread object (TR).
  __ LoadFromOffset(kLoadDoubleword,
                    card,
                    TR,
                    Thread::CardTableOffset<kMips64PointerSize>().Int32Value());
  // temp = card address for `object` = base + (object >> kCardShift).
  __ Dsrl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Daddu(temp, card, temp);
  // Store the low byte of the biased base as the dirty value; this avoids
  // needing a separate register holding the dirty-card constant.
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1494
// Converts every recorded PcRelativePatchInfo in `infos` into a LinkerPatch
// built by `Factory` and appends it to `linker_patches`. For a "low" patch,
// the PC base is the location of its linked "high" patch; a "high" patch (or
// a standalone one) uses its own location as the PC base.
template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<linker::LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.label.IsBound());
    uint32_t literal_offset = __ GetLabelLocation(&info.label);
    // Base label for the PC-relative offset: the high patch if linked, else self.
    const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
    uint32_t pc_rel_offset = __ GetLabelLocation(&info_high.label);
    linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}
1509
// Collects all linker patches recorded during code generation into
// `linker_patches`. Method/type/string patches are emitted as boot-image
// relative patches when compiling the boot image, otherwise as class-table /
// intern-table patches; .bss entry patches are emitted in both modes.
void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  // Reserve the exact total up front; verified by the DCHECK_EQ below.
  size_t size =
      pc_relative_method_patches_.size() +
      method_bss_entry_patches_.size() +
      pc_relative_type_patches_.size() +
      type_bss_entry_patches_.size() +
      pc_relative_string_patches_.size() +
      string_bss_entry_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
        pc_relative_method_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeTypePatch>(
        pc_relative_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
        pc_relative_string_patches_, linker_patches);
  } else {
    // App compilation: direct method patches are only valid for boot images.
    DCHECK(pc_relative_method_patches_.empty());
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeClassTablePatch>(
        pc_relative_type_patches_, linker_patches);
    EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringInternTablePatch>(
        pc_relative_string_patches_, linker_patches);
  }
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
      method_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::TypeBssEntryPatch>(
      type_bss_entry_patches_, linker_patches);
  EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
      string_bss_entry_patches_, linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1542
// Records a new PC-relative patch for a boot-image method reference.
// `info_high` is null for the high half and points to the high-half patch
// when recording the matching low half.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeMethodPatch(
    MethodReference target_method,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(*target_method.dex_file,
                            target_method.index,
                            info_high,
                            &pc_relative_method_patches_);
}
1551
// Records a new PC-relative patch for a method's .bss entry.
// `info_high` links a low-half patch to its high half (null for the high half).
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewMethodBssEntryPatch(
    MethodReference target_method,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(*target_method.dex_file,
                            target_method.index,
                            info_high,
                            &method_bss_entry_patches_);
}
1560
// Records a new PC-relative patch for a type reference (boot-image relative
// or class-table, depending on the compilation mode; see EmitLinkerPatches).
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeTypePatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
}
1567
// Records a new PC-relative patch for a type's .bss entry.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
    const DexFile& dex_file,
    dex::TypeIndex type_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
}
1574
// Records a new PC-relative patch for a string reference (boot-image relative
// or intern-table, depending on the compilation mode; see EmitLinkerPatches).
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStringPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
}
1581
// Records a new PC-relative patch for a string's .bss entry.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewStringBssEntryPatch(
    const DexFile& dex_file,
    dex::StringIndex string_index,
    const PcRelativePatchInfo* info_high) {
  return NewPcRelativePatch(dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
}
1588
// Common helper for the New*Patch factories above: appends a new
// PcRelativePatchInfo to `patches` and returns a pointer to it. The pointer
// stays valid because ArenaDeque does not relocate existing elements.
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
    const DexFile& dex_file,
    uint32_t offset_or_index,
    const PcRelativePatchInfo* info_high,
    ArenaDeque<PcRelativePatchInfo>* patches) {
  patches->emplace_back(dex_file, offset_or_index, info_high);
  return &patches->back();
}
1597
// Returns the literal for the 32-bit `value` from `map`, creating and
// caching a new one on first use so equal constants share one literal.
Literal* CodeGeneratorMIPS64::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
  return map->GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint32_t>(value); });
}
1603
// Returns the cached literal for the 64-bit `value`, creating it on first use.
Literal* CodeGeneratorMIPS64::DeduplicateUint64Literal(uint64_t value) {
  return uint64_literals_.GetOrCreate(
      value,
      [this, value]() { return __ NewLiteral<uint64_t>(value); });
}
1609
// Returns a deduplicated 32-bit literal holding a boot-image address.
// The address must fit in 32 bits (enforced by dchecked_integral_cast).
Literal* CodeGeneratorMIPS64::DeduplicateBootImageAddressLiteral(uint64_t address) {
  return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
}
1613
// Emits the high half of a PC-relative address computation into `out` and
// binds the patch labels: `info_high`'s label at the AUIPC, and, if given,
// `info_low`'s label at the position of the (caller-emitted) next
// instruction that consumes the low half.
void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                               GpuRegister out,
                                                               PcRelativePatchInfo* info_low) {
  DCHECK(!info_high->patch_info_high);
  __ Bind(&info_high->label);
  // Add the high half of a 32-bit offset to PC.
  __ Auipc(out, /* placeholder */ 0x1234);
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. ld, jialc, daddiu).
  if (info_low != nullptr) {
    DCHECK_EQ(info_low->patch_info_high, info_high);
    __ Bind(&info_low->label);
  }
}
1628
// Reserves a JIT root-table slot for the string `handle` and returns a
// deduplicated placeholder literal that EmitJitRootPatches will later patch
// with the address of that slot.
Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
                                                          dex::StringIndex string_index,
                                                          Handle<mirror::String> handle) {
  ReserveJitStringRoot(StringReference(&dex_file, string_index), handle);
  return jit_string_patches_.GetOrCreate(
      StringReference(&dex_file, string_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
1637
// Reserves a JIT root-table slot for the class `handle` and returns a
// deduplicated placeholder literal that EmitJitRootPatches will later patch
// with the address of that slot.
Literal* CodeGeneratorMIPS64::DeduplicateJitClassLiteral(const DexFile& dex_file,
                                                         dex::TypeIndex type_index,
                                                         Handle<mirror::Class> handle) {
  ReserveJitClassRoot(TypeReference(&dex_file, type_index), handle);
  return jit_class_patches_.GetOrCreate(
      TypeReference(&dex_file, type_index),
      [this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
1646
1647void CodeGeneratorMIPS64::PatchJitRootUse(uint8_t* code,
1648 const uint8_t* roots_data,
1649 const Literal* literal,
1650 uint64_t index_in_table) const {
1651 uint32_t literal_offset = GetAssembler().GetLabelLocation(literal->GetLabel());
1652 uintptr_t address =
1653 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
1654 reinterpret_cast<uint32_t*>(code + literal_offset)[0] = dchecked_integral_cast<uint32_t>(address);
1655}
1656
1657void CodeGeneratorMIPS64::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1658 for (const auto& entry : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001659 const StringReference& string_reference = entry.first;
1660 Literal* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01001661 uint64_t index_in_table = GetJitStringRootIndex(string_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001662 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001663 }
1664 for (const auto& entry : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001665 const TypeReference& type_reference = entry.first;
1666 Literal* table_entry_literal = entry.second;
Vladimir Marko174b2e22017-10-12 13:34:49 +01001667 uint64_t index_in_table = GetJitClassRootIndex(type_reference);
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001668 PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001669 }
1670}
1671
David Brazdil58282f42016-01-14 12:45:10 +00001672void CodeGeneratorMIPS64::SetupBlockedRegisters() const {
Alexey Frunze4dda3372015-06-01 18:31:49 -07001673 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
1674 blocked_core_registers_[ZERO] = true;
1675 blocked_core_registers_[K0] = true;
1676 blocked_core_registers_[K1] = true;
1677 blocked_core_registers_[GP] = true;
1678 blocked_core_registers_[SP] = true;
1679 blocked_core_registers_[RA] = true;
1680
Lazar Trsicd9672662015-09-03 17:33:01 +02001681 // AT, TMP(T8) and TMP2(T3) are used as temporary/scratch
1682 // registers (similar to how AT is used by MIPS assemblers).
Alexey Frunze4dda3372015-06-01 18:31:49 -07001683 blocked_core_registers_[AT] = true;
1684 blocked_core_registers_[TMP] = true;
Lazar Trsicd9672662015-09-03 17:33:01 +02001685 blocked_core_registers_[TMP2] = true;
Alexey Frunze4dda3372015-06-01 18:31:49 -07001686 blocked_fpu_registers_[FTMP] = true;
1687
Goran Jakovljevice7de5ec2017-12-14 10:25:20 +01001688 if (GetInstructionSetFeatures().HasMsa()) {
1689 // To be used just for MSA instructions.
1690 blocked_fpu_registers_[FTMP2] = true;
1691 }
1692
Alexey Frunze4dda3372015-06-01 18:31:49 -07001693 // Reserve suspend and thread registers.
1694 blocked_core_registers_[S0] = true;
1695 blocked_core_registers_[TR] = true;
1696
1697 // Reserve T9 for function calls
1698 blocked_core_registers_[T9] = true;
1699
Goran Jakovljevic782be112016-06-21 12:39:04 +02001700 if (GetGraph()->IsDebuggable()) {
1701 // Stubs do not save callee-save floating point registers. If the graph
1702 // is debuggable, we need to deal with these registers differently. For
1703 // now, just block them.
1704 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1705 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1706 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07001707 }
1708}
1709
// Spills core register `reg_id` to the stack slot at `stack_index` and
// returns the slot size consumed (GPRs are always saved as 64-bit values).
size_t CodeGeneratorMIPS64::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}
1714
// Reloads core register `reg_id` from the stack slot at `stack_index`;
// returns the slot size consumed.
size_t CodeGeneratorMIPS64::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadDoubleword, GpuRegister(reg_id), SP, stack_index);
  return kMips64DoublewordSize;
}
1719
// Spills FP register `reg_id` to the stack slot at `stack_index`. When the
// graph uses SIMD the full 128-bit vector register is saved, otherwise only
// the 64-bit scalar part; the matching slot size is returned.
size_t CodeGeneratorMIPS64::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreFpuToOffset(GetGraph()->HasSIMD() ? kStoreQuadword : kStoreDoubleword,
                      FpuRegister(reg_id),
                      SP,
                      stack_index);
  return GetFloatingPointSpillSlotSize();
}
1727
// Reloads FP register `reg_id` from the stack slot at `stack_index`,
// mirroring SaveFloatingPointRegister (quadword with SIMD, doubleword
// otherwise); returns the slot size consumed.
size_t CodeGeneratorMIPS64::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFpuFromOffset(GetGraph()->HasSIMD() ? kLoadQuadword : kLoadDoubleword,
                       FpuRegister(reg_id),
                       SP,
                       stack_index);
  return GetFloatingPointSpillSlotSize();
}
1735
// Prints the name of core register `reg` to `stream` (debug dumps).
void CodeGeneratorMIPS64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << GpuRegister(reg);
}
1739
// Prints the name of FP register `reg` to `stream` (debug dumps).
void CodeGeneratorMIPS64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FpuRegister(reg);
}
1743
// Emits a call to the quick runtime entry point `entrypoint` on behalf of
// `instruction`, and records a stack map at `dex_pc` when the entry point
// requires one. `slow_path` may be null for calls emitted on the fast path.
void CodeGeneratorMIPS64::InvokeRuntime(QuickEntrypointEnum entrypoint,
                                        HInstruction* instruction,
                                        uint32_t dex_pc,
                                        SlowPathCode* slow_path) {
  ValidateInvokeRuntime(entrypoint, instruction, slow_path);
  GenerateInvokeRuntime(GetThreadOffset<kMips64PointerSize>(entrypoint).Int32Value());
  if (EntrypointRequiresStackMap(entrypoint)) {
    RecordPcInfo(instruction, dex_pc, slow_path);
  }
}
1754
// Emits a runtime call through a raw thread-local `entry_point_offset`
// without recording a stack map; validity of skipping the stack map is
// checked by ValidateInvokeRuntimeWithoutRecordingPcInfo.
void CodeGeneratorMIPS64::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                              HInstruction* instruction,
                                                              SlowPathCode* slow_path) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset);
}
1761
// Emits the actual runtime call sequence: load the entry point address from
// the thread register (TR) into T9 (the register this code generator
// reserves for function calls; see SetupBlockedRegisters), call it, and fill
// the JALR branch delay slot with a NOP.
void CodeGeneratorMIPS64::GenerateInvokeRuntime(int32_t entry_point_offset) {
  __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
  __ Jalr(T9);
  __ Nop();
}
1767
// Emits a branch to `slow_path` when the class in `class_reg` is not yet
// initialized. Only the byte of mirror::Class::status_ holding the
// ClassStatus bits (located above the SubtypeCheckBits) is loaded and
// compared against kInitialized.
void InstructionCodeGeneratorMIPS64::GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path,
                                                                      GpuRegister class_reg) {
  constexpr size_t status_lsb_position = SubtypeCheckBits::BitStructSizeOf();
  // Byte within the status field that contains the ClassStatus bits.
  const size_t status_byte_offset =
      mirror::Class::StatusOffset().SizeValue() + (status_lsb_position / kBitsPerByte);
  // kInitialized shifted to its position within that byte.
  constexpr uint32_t shifted_initialized_value =
      enum_cast<uint32_t>(ClassStatus::kInitialized) << (status_lsb_position % kBitsPerByte);

  __ LoadFromOffset(kLoadUnsignedByte, TMP, class_reg, status_byte_offset);
  __ LoadConst32(AT, shifted_initialized_value);
  // Unsigned compare: status < kInitialized means not yet initialized.
  __ Bltuc(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1783
// Emits a full memory barrier. The requested `kind` is ignored because the
// MIPS SYNC variant used here (stype 0) already orders all memory accesses.
void InstructionCodeGeneratorMIPS64::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // only stype 0 is supported
}
1787
// Emits a suspend check for `instruction`: tests the thread's flags and
// enters the (lazily created, shared per-instruction) slow path when any
// flag is set. With a `successor` (loop back edge) the fast path branches
// straight to the successor block; without one, execution falls through
// after the check.
void InstructionCodeGeneratorMIPS64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS64* slow_path =
      down_cast<SuspendCheckSlowPathMIPS64*>(instruction->GetSlowPath());

  if (slow_path == nullptr) {
    // First visit: create the slow path and cache it on the instruction so
    // later visits (and OSR of the same check) reuse it.
    slow_path =
        new (codegen_->GetScopedAllocator()) SuspendCheckSlowPathMIPS64(instruction, successor);
    instruction->SetSlowPath(slow_path);
    codegen_->AddSlowPath(slow_path);
    if (successor != nullptr) {
      DCHECK(successor->IsLoopHeader());
    }
  } else {
    DCHECK_EQ(slow_path->GetSuccessor(), successor);
  }

  // Load the 16-bit thread flags; non-zero means a suspend request (or other
  // flagged condition) is pending.
  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMips64PointerSize>().Int32Value());
  if (successor == nullptr) {
    __ Bnezc(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ Beqzc(TMP, codegen_->GetLabelOf(successor));
    __ Bc(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
1818
// Constructs the per-graph instruction visitor, caching the assembler and
// the owning code generator for use by the Visit* methods.
InstructionCodeGeneratorMIPS64::InstructionCodeGeneratorMIPS64(HGraph* graph,
                                                               CodeGeneratorMIPS64* codegen)
    : InstructionCodeGenerator(graph, codegen),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}
1824
// Builds the register/constant location constraints for Add, Sub, And, Or and
// Xor. For integral types it decides whether the right-hand side may stay a
// constant (encodable as an immediate by the code generator) or must be
// materialized into a register.
void LocationsBuilderMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DCHECK_EQ(instruction->InputCount(), 2U);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  DataType::Type type = instruction->GetResultType();
  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      locations->SetInAt(0, Location::RequiresRegister());
      HInstruction* right = instruction->InputAt(1);
      bool can_use_imm = false;
      if (right->IsConstant()) {
        int64_t imm = CodeGenerator::GetInt64ValueOf(right->AsConstant());
        if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
          // Logical immediates (andi/ori/xori) are zero-extended 16-bit.
          can_use_imm = IsUint<16>(imm);
        } else {
          DCHECK(instruction->IsAdd() || instruction->IsSub());
          bool single_use = right->GetUses().HasExactlyOneElement();
          if (instruction->IsSub()) {
            // Sub is emitted as Add of the negated immediate; skip negation
            // for 32-bit INT32_MIN, whose negation is not representable in
            // int32 (the codegen handles that pattern via its upper half).
            if (!(type == DataType::Type::kInt32 && imm == INT32_MIN)) {
              imm = -imm;
            }
          }
          // Allow the immediate when it fits a single addiu/aui (daddiu/daui),
          // or — for a single use — when a short multi-instruction sequence
          // is still cheaper than materializing the constant separately.
          if (type == DataType::Type::kInt32) {
            can_use_imm = IsInt<16>(imm) || (Low16Bits(imm) == 0) || single_use;
          } else {
            can_use_imm = IsInt<16>(imm) || (IsInt<32>(imm) && (Low16Bits(imm) == 0)) || single_use;
          }
        }
      }
      if (can_use_imm)
        locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
      else
        locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      }
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64:
      // FP operands and result always live in FPU registers.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
  }
}
1873
// Emits code for Add, Sub, And, Or and Xor. Integral ops may take an
// immediate right-hand side (as allowed by LocationsBuilderMIPS64::
// HandleBinaryOp); Sub-with-immediate is lowered as Add of the negated
// immediate. Large immediates are synthesized with Aui/Daui/Dahi/Dati
// half-word adds rather than loading the constant into a register.
void InstructionCodeGeneratorMIPS64::HandleBinaryOp(HBinaryOperation* instruction) {
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd() || instruction->IsSub()) {
        if (instruction->IsSub()) {
          // Harmless on the register path: rhs_imm is 0 there.
          rhs_imm = -rhs_imm;
        }
        if (type == DataType::Type::kInt32) {
          if (use_imm) {
            if (IsInt<16>(rhs_imm)) {
              __ Addiu(dst, lhs, rhs_imm);
            } else {
              // Split into high/low 16-bit halves; if the (sign-extended) low
              // half is negative, pre-increment the high half to compensate.
              int16_t rhs_imm_high = High16Bits(rhs_imm);
              int16_t rhs_imm_low = Low16Bits(rhs_imm);
              if (rhs_imm_low < 0) {
                rhs_imm_high += 1;
              }
              __ Aui(dst, lhs, rhs_imm_high);
              if (rhs_imm_low != 0) {
                __ Addiu(dst, dst, rhs_imm_low);
              }
            }
          } else {
            if (instruction->IsAdd()) {
              __ Addu(dst, lhs, rhs_reg);
            } else {
              DCHECK(instruction->IsSub());
              __ Subu(dst, lhs, rhs_reg);
            }
          }
        } else {
          if (use_imm) {
            if (IsInt<16>(rhs_imm)) {
              __ Daddiu(dst, lhs, rhs_imm);
            } else if (IsInt<32>(rhs_imm)) {
              // 32-bit immediate: Daui + Daddiu; the carry from a negative low
              // half may overflow the high half (to -32768), which is fixed up
              // with an extra Dahi(1) into bits 32..47.
              int16_t rhs_imm_high = High16Bits(rhs_imm);
              int16_t rhs_imm_low = Low16Bits(rhs_imm);
              bool overflow_hi16 = false;
              if (rhs_imm_low < 0) {
                rhs_imm_high += 1;
                overflow_hi16 = (rhs_imm_high == -32768);
              }
              __ Daui(dst, lhs, rhs_imm_high);
              if (rhs_imm_low != 0) {
                __ Daddiu(dst, dst, rhs_imm_low);
              }
              if (overflow_hi16) {
                __ Dahi(dst, 1);
              }
            } else {
              // Full 64-bit immediate: propagate the sign-extension carry of
              // each 16-bit chunk into the next one, then add the chunks with
              // Daddiu (bits 0..15), Daui (16..31), Dahi (32..47), Dati
              // (48..63), skipping zero chunks.
              int16_t rhs_imm_low = Low16Bits(Low32Bits(rhs_imm));
              if (rhs_imm_low < 0) {
                rhs_imm += (INT64_C(1) << 16);
              }
              int16_t rhs_imm_upper = High16Bits(Low32Bits(rhs_imm));
              if (rhs_imm_upper < 0) {
                rhs_imm += (INT64_C(1) << 32);
              }
              int16_t rhs_imm_high = Low16Bits(High32Bits(rhs_imm));
              if (rhs_imm_high < 0) {
                rhs_imm += (INT64_C(1) << 48);
              }
              int16_t rhs_imm_top = High16Bits(High32Bits(rhs_imm));
              GpuRegister tmp = lhs;
              if (rhs_imm_low != 0) {
                __ Daddiu(dst, tmp, rhs_imm_low);
                tmp = dst;
              }
              // Dahi and Dati must use the same input and output register, so we have to initialize
              // the dst register using Daddiu or Daui, even when the intermediate value is zero:
              // Daui(dst, lhs, 0).
              if ((rhs_imm_upper != 0) || (rhs_imm_low == 0)) {
                __ Daui(dst, tmp, rhs_imm_upper);
              }
              if (rhs_imm_high != 0) {
                __ Dahi(dst, rhs_imm_high);
              }
              if (rhs_imm_top != 0) {
                __ Dati(dst, rhs_imm_top);
              }
            }
          } else if (instruction->IsAdd()) {
            __ Daddu(dst, lhs, rhs_reg);
          } else {
            DCHECK(instruction->IsSub());
            __ Dsubu(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      // FP add/sub: single- vs double-precision opcode chosen by type.
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (instruction->IsAdd()) {
        if (type == DataType::Type::kFloat32)
          __ AddS(dst, lhs, rhs);
        else
          __ AddD(dst, lhs, rhs);
      } else if (instruction->IsSub()) {
        if (type == DataType::Type::kFloat32)
          __ SubS(dst, lhs, rhs);
        else
          __ SubD(dst, lhs, rhs);
      } else {
        LOG(FATAL) << "Unexpected floating-point binary operation";
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
2021
2022void LocationsBuilderMIPS64::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002023 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002024
Vladimir Markoca6fff82017-10-03 14:49:14 +01002025 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instr);
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002026 DataType::Type type = instr->GetResultType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07002027 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01002028 case DataType::Type::kInt32:
2029 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07002030 locations->SetInAt(0, Location::RequiresRegister());
2031 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07002032 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002033 break;
2034 }
2035 default:
2036 LOG(FATAL) << "Unexpected shift type " << type;
2037 }
2038}
2039
// Emits code for shift/rotate operations (Shl, Shr, UShr, Ror). The shift
// distance is masked to the type width (31 or 63). Immediate 64-bit shifts
// of 32 or more use the dedicated "+32" opcodes (Dsll32 etc.); variable
// shifts use the *v register forms, which mask the distance in hardware.
void InstructionCodeGeneratorMIPS64::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  DataType::Type type = instr->GetType();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);

      GpuRegister rhs_reg = ZERO;
      int64_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<GpuRegister>();
      }

      if (use_imm) {
        // Java semantics: only the low 5 (int) or 6 (long) bits of the
        // distance are significant.
        uint32_t shift_value = rhs_imm &
            (type == DataType::Type::kInt32 ? kMaxIntShiftDistance : kMaxLongShiftDistance);

        if (shift_value == 0) {
          // Shift by zero is a plain move (elided if dst == lhs).
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sll(dst, lhs, shift_value);
          } else if (instr->IsShr()) {
            __ Sra(dst, lhs, shift_value);
          } else if (instr->IsUShr()) {
            __ Srl(dst, lhs, shift_value);
          } else {
            __ Rotr(dst, lhs, shift_value);
          }
        } else {
          if (shift_value < 32) {
            if (instr->IsShl()) {
              __ Dsll(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl(dst, lhs, shift_value);
            } else {
              __ Drotr(dst, lhs, shift_value);
            }
          } else {
            // Distances 32..63 use the "+32" encodings with the distance
            // reduced by 32.
            shift_value -= 32;
            if (instr->IsShl()) {
              __ Dsll32(dst, lhs, shift_value);
            } else if (instr->IsShr()) {
              __ Dsra32(dst, lhs, shift_value);
            } else if (instr->IsUShr()) {
              __ Dsrl32(dst, lhs, shift_value);
            } else {
              __ Drotr32(dst, lhs, shift_value);
            }
          }
        }
      } else {
        if (type == DataType::Type::kInt32) {
          if (instr->IsShl()) {
            __ Sllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Srav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Srlv(dst, lhs, rhs_reg);
          } else {
            __ Rotrv(dst, lhs, rhs_reg);
          }
        } else {
          if (instr->IsShl()) {
            __ Dsllv(dst, lhs, rhs_reg);
          } else if (instr->IsShr()) {
            __ Dsrav(dst, lhs, rhs_reg);
          } else if (instr->IsUShr()) {
            __ Dsrlv(dst, lhs, rhs_reg);
          } else {
            __ Drotrv(dst, lhs, rhs_reg);
          }
        }
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2132
// Add shares its location constraints with the other binary operations.
void LocationsBuilderMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2136
// Add shares its code generation with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2140
// And shares its location constraints with the other binary operations.
void LocationsBuilderMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2144
// And shares its code generation with the other binary operations.
void InstructionCodeGeneratorMIPS64::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2148
// Builds the location constraints for an array element load. Object-element
// loads with read barriers may call a slow path; with Baker read barriers a
// marking temp register is reserved unless the corresponding read-barrier
// thunks make it unnecessary.
void LocationsBuilderMIPS64::VisitArrayGet(HArrayGet* instruction) {
  DataType::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == DataType::Type::kReference);
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction,
                                                       object_array_get_with_read_barrier
                                                           ? LocationSummary::kCallOnSlowPath
                                                           : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // In: array reference in a register; index in a register or as a constant.
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    bool temp_needed = instruction->GetIndex()->IsConstant()
        ? !kBakerReadBarrierThunksEnableForFields
        : !kBakerReadBarrierThunksEnableForArrays;
    if (temp_needed) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
2185
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002186static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS64* codegen) {
2187 auto null_checker = [codegen, instruction]() {
2188 codegen->MaybeRecordImplicitNullCheck(instruction);
2189 };
2190 return null_checker;
2191}
2192
// Emits code for an array element load, dispatching on the element type.
// Constant indices fold into the load offset; variable indices are scaled
// into TMP with Daddu/Dlsa. Special paths: compressed java.lang.String
// CharAt (kUint16) and reference loads with read barriers.
void InstructionCodeGeneratorMIPS64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location out_loc = locations->Out();
  Location index = locations->InAt(1);
  uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
  // Records an implicit null check at the first faulting load below.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  DataType::Type type = instruction->GetType();
  const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
                                        instruction->IsStringCharAt();
  switch (type) {
    // 8-bit zero-extended loads.
    case DataType::Type::kBool:
    case DataType::Type::kUint8: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    // 8-bit sign-extended load.
    case DataType::Type::kInt8: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
      } else {
        __ Daddu(TMP, obj, index.AsRegister<GpuRegister>());
        __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
      }
      break;
    }

    // 16-bit zero-extended load; for String.charAt with compression enabled,
    // test the compression bit and load either a byte or a half-word.
    case DataType::Type::kUint16: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (maybe_compressed_char_at) {
        uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
        // TMP := compression flag (bit 0 of the count field).
        __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
        __ Dext(TMP, TMP, 0, 1);
        static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
                      "Expecting 0=compressed, 1=uncompressed");
      }
      if (index.IsConstant()) {
        int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
        if (maybe_compressed_char_at) {
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedByte,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_1));
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2));
          __ Bind(&done);
        } else {
          __ LoadFromOffset(kLoadUnsignedHalfword,
                            out,
                            obj,
                            data_offset + (const_index << TIMES_2),
                            null_checker);
        }
      } else {
        GpuRegister index_reg = index.AsRegister<GpuRegister>();
        if (maybe_compressed_char_at) {
          Mips64Label uncompressed_load, done;
          __ Bnezc(TMP, &uncompressed_load);
          __ Daddu(TMP, obj, index_reg);
          __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
          __ Bc(&done);
          __ Bind(&uncompressed_load);
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
          __ Bind(&done);
        } else {
          __ Dlsa(TMP, index_reg, obj, TIMES_2);
          __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
        }
      }
      break;
    }

    // 16-bit sign-extended load.
    case DataType::Type::kInt16: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_2);
        __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    // 32-bit load.
    case DataType::Type::kInt32: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      LoadOperandType load_type =
          (type == DataType::Type::kReference) ? kLoadUnsignedWord : kLoadWord;
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(load_type, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFromOffset(load_type, out, TMP, data_offset, null_checker);
      }
      break;
    }

    // Reference load, possibly via Baker or slow-path read barriers.
    case DataType::Type::kReference: {
      static_assert(
          sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
          "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
      // /* HeapReference<Object> */ out =
      //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        bool temp_needed = index.IsConstant()
            ? !kBakerReadBarrierThunksEnableForFields
            : !kBakerReadBarrierThunksEnableForArrays;
        Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier call.
        DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
        if (index.IsConstant()) {
          // Array load with a constant index can be treated as a field load.
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          offset,
                                                          temp,
                                                          /* needs_null_check */ false);
        } else {
          codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
                                                          out_loc,
                                                          obj,
                                                          data_offset,
                                                          index,
                                                          temp,
                                                          /* needs_null_check */ false);
        }
      } else {
        GpuRegister out = out_loc.AsRegister<GpuRegister>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ LoadFromOffset(kLoadUnsignedWord, out, obj, offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
        } else {
          __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          __ LoadFromOffset(kLoadUnsignedWord, out, TMP, data_offset, null_checker);
          // If read barriers are enabled, emit read barriers other than
          // Baker's using a slow path (and also unpoison the loaded
          // reference, if heap poisoning is enabled).
          codegen_->MaybeGenerateReadBarrierSlow(instruction,
                                                 out_loc,
                                                 out_loc,
                                                 obj_loc,
                                                 data_offset,
                                                 index);
        }
      }
      break;
    }

    // 64-bit load.
    case DataType::Type::kInt64: {
      GpuRegister out = out_loc.AsRegister<GpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    // Single-precision FP load.
    case DataType::Type::kFloat32: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFpuFromOffset(kLoadWord, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        __ LoadFpuFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
      }
      break;
    }

    // Double-precision FP load.
    case DataType::Type::kFloat64: {
      FpuRegister out = out_loc.AsFpuRegister<FpuRegister>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFpuFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
      } else {
        __ Dlsa(TMP, index.AsRegister<GpuRegister>(), obj, TIMES_8);
        __ LoadFpuFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2420
// ArrayLength: array reference in, length out, no overlap needed.
void LocationsBuilderMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
2426
// Loads the array length field; the load itself doubles as the implicit null
// check, so MaybeRecordImplicitNullCheck must immediately follow it.
void InstructionCodeGeneratorMIPS64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
  codegen_->MaybeRecordImplicitNullCheck(instruction);
  // Mask out compression flag from String's array length.
  if (mirror::kUseStringCompression && instruction->IsStringLength()) {
    __ Srl(out, out, 1u);
  }
}
2439
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01002440Location LocationsBuilderMIPS64::RegisterOrZeroConstant(HInstruction* instruction) {
2441 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2442 ? Location::ConstantLocation(instruction->AsConstant())
2443 : Location::RequiresRegister();
2444}
2445
2446Location LocationsBuilderMIPS64::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2447 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2448 // We can store a non-zero float or double constant without first loading it into the FPU,
2449 // but we should only prefer this if the constant has a single use.
2450 if (instruction->IsConstant() &&
2451 (instruction->AsConstant()->IsZeroBitPattern() ||
2452 instruction->GetUses().HasExactlyOneElement())) {
2453 return Location::ConstantLocation(instruction->AsConstant());
2454 // Otherwise fall through and require an FPU register for the constant.
2455 }
2456 return Location::RequiresFpuRegister();
2457}
2458
// Builds the location constraints for an array element store. A store that
// may need a runtime type check goes through a slow path; reference stores
// that need a GC write barrier reserve a temp register.
void LocationsBuilderMIPS64::VisitArraySet(HArraySet* instruction) {
  DataType::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      may_need_runtime_call_for_type_check ?
          LocationSummary::kCallOnSlowPath :
          LocationSummary::kNoCall);

  // In: array reference; index (register or constant); value.
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (DataType::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
    // FP values may stay constants when storable without an FPU round trip.
    locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
  } else {
    // Zero-bit-pattern values may be stored straight from the ZERO register.
    locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
  }
  if (needs_write_barrier) {
    // Temporary register for the write barrier.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for ref. poisoning too.
  }
}
2484
// Emits the code for an array element store, dispatching on the component type.
// With a constant index the element address is folded into the store offset (relative to
// `obj`); with a register index the element base address is computed into TMP first.
// Reference stores may additionally require a runtime type check (slow path) and a GC
// card mark; see the kReference case below.
void InstructionCodeGeneratorMIPS64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location index = locations->InAt(1);
  Location value_location = locations->InAt(2);
  DataType::Type value_type = instruction->GetComponentType();
  bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);
  // Constant index: address via `obj` + folded offset. Register index: TMP holds the
  // computed element base address.
  GpuRegister base_reg = index.IsConstant() ? obj : TMP;

  switch (value_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
      } else {
        // Byte elements: the index is the byte offset, a plain add suffices.
        __ Daddu(base_reg, obj, index.AsRegister<GpuRegister>());
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint16:
    case DataType::Type::kInt16: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
      } else {
        // Dlsa computes base_reg = (index << 1) + obj in one instruction.
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_2);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kInt32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kReference: {
      if (value_location.IsConstant()) {
        // Just setting null. No write barrier or type check is needed for storing null.
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        if (index.IsConstant()) {
          data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
        } else {
          __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
        }
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        DCHECK_EQ(value, 0);
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
        DCHECK(!needs_write_barrier);
        DCHECK(!may_need_runtime_call_for_type_check);
        break;
      }

      DCHECK(needs_write_barrier);
      GpuRegister value = value_location.AsRegister<GpuRegister>();
      GpuRegister temp1 = locations->GetTemp(0).AsRegister<GpuRegister>();
      GpuRegister temp2 = TMP;  // Doesn't need to survive slow path.
      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
      uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
      uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
      Mips64Label done;
      SlowPathCodeMIPS64* slow_path = nullptr;

      if (may_need_runtime_call_for_type_check) {
        slow_path = new (codegen_->GetScopedAllocator()) ArraySetSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        if (instruction->GetValueCanBeNull()) {
          // Null can always be stored without a type check; store it and skip ahead.
          Mips64Label non_zero;
          __ Bnezc(value, &non_zero);
          uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
          if (index.IsConstant()) {
            data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
          } else {
            __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
          }
          __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
          __ Bc(&done);
          __ Bind(&non_zero);
        }

        // Note that when read barriers are enabled, the type checks
        // are performed without read barriers. This is fine, even in
        // the case where a class object is in the from-space after
        // the flip, as a comparison involving such a type would not
        // produce a false positive; it may of course produce a false
        // negative, in which case we would take the ArraySet slow
        // path.

        // /* HeapReference<Class> */ temp1 = obj->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, obj, class_offset, null_checker);
        __ MaybeUnpoisonHeapReference(temp1);

        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, component_offset);
        // /* HeapReference<Class> */ temp2 = value->klass_
        __ LoadFromOffset(kLoadUnsignedWord, temp2, value, class_offset);
        // If heap poisoning is enabled, no need to unpoison `temp1`
        // nor `temp2`, as we are comparing two poisoned references.

        if (instruction->StaticTypeOfArrayIsObjectArray()) {
          Mips64Label do_put;
          __ Beqc(temp1, temp2, &do_put);
          // If heap poisoning is enabled, the `temp1` reference has
          // not been unpoisoned yet; unpoison it now.
          __ MaybeUnpoisonHeapReference(temp1);

          // /* HeapReference<Class> */ temp1 = temp1->super_class_
          __ LoadFromOffset(kLoadUnsignedWord, temp1, temp1, super_offset);
          // If heap poisoning is enabled, no need to unpoison
          // `temp1`, as we are comparing against null below.
          __ Bnezc(temp1, slow_path->GetEntryLabel());
          __ Bind(&do_put);
        } else {
          __ Bnec(temp1, temp2, slow_path->GetEntryLabel());
        }
      }

      GpuRegister source = value;
      if (kPoisonHeapReferences) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        __ Move(temp1, value);
        __ PoisonHeapReference(temp1);
        source = temp1;
      }

      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      __ StoreToOffset(kStoreWord, source, base_reg, data_offset);

      if (!may_need_runtime_call_for_type_check) {
        // The implicit null check must be attached to the actual store when no
        // explicit null checker was threaded through above.
        codegen_->MaybeRecordImplicitNullCheck(instruction);
      }

      codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());

      if (done.IsLinked()) {
        __ Bind(&done);
      }

      if (slow_path != nullptr) {
        __ Bind(slow_path->GetExitLabel());
      }
      break;
    }

    case DataType::Type::kInt64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        GpuRegister value = value_location.AsRegister<GpuRegister>();
        __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat32: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_4);
      }
      if (value_location.IsConstant()) {
        // A float constant is stored via its raw bit pattern from a GPR.
        int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kFloat64: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      if (index.IsConstant()) {
        data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
      } else {
        __ Dlsa(base_reg, index.AsRegister<GpuRegister>(), obj, TIMES_8);
      }
      if (value_location.IsConstant()) {
        // A double constant is stored via its raw bit pattern from a GPR.
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
      } else {
        FpuRegister value = value_location.AsFpuRegister<FpuRegister>();
        __ StoreFpuToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
      }
      break;
    }

    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2725
2726void LocationsBuilderMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01002727 RegisterSet caller_saves = RegisterSet::Empty();
2728 InvokeRuntimeCallingConvention calling_convention;
2729 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2730 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2731 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicdbd43032017-11-15 16:31:56 +01002732
2733 HInstruction* index = instruction->InputAt(0);
2734 HInstruction* length = instruction->InputAt(1);
2735
2736 bool const_index = false;
2737 bool const_length = false;
2738
2739 if (index->IsConstant()) {
2740 if (length->IsConstant()) {
2741 const_index = true;
2742 const_length = true;
2743 } else {
2744 int32_t index_value = index->AsIntConstant()->GetValue();
2745 if (index_value < 0 || IsInt<16>(index_value + 1)) {
2746 const_index = true;
2747 }
2748 }
2749 } else if (length->IsConstant()) {
2750 int32_t length_value = length->AsIntConstant()->GetValue();
2751 if (IsUint<15>(length_value)) {
2752 const_length = true;
2753 }
2754 }
2755
2756 locations->SetInAt(0, const_index
2757 ? Location::ConstantLocation(index->AsConstant())
2758 : Location::RequiresRegister());
2759 locations->SetInAt(1, const_length
2760 ? Location::ConstantLocation(length->AsConstant())
2761 : Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07002762}
2763
// Emits the bounds check `0 <= index < length`, branching to a throwing slow path on
// failure. The emitted sequence is specialized on which operands are compile-time
// constants (see the matching LocationsBuilder method for when constants are allowed).
void InstructionCodeGeneratorMIPS64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location index_loc = locations->InAt(0);
  Location length_loc = locations->InAt(1);

  if (length_loc.IsConstant()) {
    int32_t length = length_loc.GetConstant()->AsIntConstant()->GetValue();
    if (index_loc.IsConstant()) {
      // Both constant: the check is fully resolved at compile time.
      int32_t index = index_loc.GetConstant()->AsIntConstant()->GetValue();
      if (index < 0 || index >= length) {
        // Statically out of bounds: unconditionally take the slow path.
        BoundsCheckSlowPathMIPS64* slow_path =
            new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
        codegen_->AddSlowPath(slow_path);
        __ Bc(slow_path->GetEntryLabel());
      } else {
        // Nothing to be done.
      }
      return;
    }

    // Constant length, register index.
    BoundsCheckSlowPathMIPS64* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    GpuRegister index = index_loc.AsRegister<GpuRegister>();
    if (length == 0) {
      // Empty array: every access fails.
      __ Bc(slow_path->GetEntryLabel());
    } else if (length == 1) {
      // Only index 0 is valid: fail on any non-zero index.
      __ Bnezc(index, slow_path->GetEntryLabel());
    } else {
      // TMP = (unsigned)index < length; unsigned compare also rejects negative indices.
      DCHECK(IsUint<15>(length)) << length;
      __ Sltiu(TMP, index, length);
      __ Beqzc(TMP, slow_path->GetEntryLabel());
    }
  } else {
    GpuRegister length = length_loc.AsRegister<GpuRegister>();
    BoundsCheckSlowPathMIPS64* slow_path =
        new (codegen_->GetScopedAllocator()) BoundsCheckSlowPathMIPS64(instruction);
    codegen_->AddSlowPath(slow_path);
    if (index_loc.IsConstant()) {
      // Register length, constant index.
      int32_t index = index_loc.GetConstant()->AsIntConstant()->GetValue();
      if (index < 0) {
        // A negative index always fails.
        __ Bc(slow_path->GetEntryLabel());
      } else if (index == 0) {
        // Index 0 fails only for an empty (or negative-length) array.
        __ Blezc(length, slow_path->GetEntryLabel());
      } else {
        // TMP = length < index + 1, i.e. length <= index means out of bounds.
        DCHECK(IsInt<16>(index + 1)) << index;
        __ Sltiu(TMP, length, index + 1);
        __ Bnezc(TMP, slow_path->GetEntryLabel());
      }
    } else {
      // Both in registers: a single unsigned compare-and-branch covers both
      // the negative-index and index-too-large cases.
      GpuRegister index = index_loc.AsRegister<GpuRegister>();
      __ Bgeuc(index, length, slow_path->GetEntryLabel());
    }
  }
}
2819
Alexey Frunze15958152017-02-09 19:08:30 -08002820// Temp is used for read barrier.
2821static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
2822 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002823 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08002824 (kUseBakerReadBarrier ||
2825 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
2826 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
2827 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
2828 return 1;
2829 }
2830 return 0;
2831}
2832
2833// Extra temp is used for read barrier.
2834static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
2835 return 1 + NumberOfInstanceOfTemps(type_check_kind);
2836}
2837
Alexey Frunze4dda3372015-06-01 18:31:49 -07002838void LocationsBuilderMIPS64::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08002839 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzedfc30af2018-01-24 16:25:10 -08002840 LocationSummary::CallKind call_kind = CodeGenerator::GetCheckCastCallKind(instruction);
Vladimir Markoca6fff82017-10-03 14:49:14 +01002841 LocationSummary* locations =
2842 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunze4dda3372015-06-01 18:31:49 -07002843 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffraybff7a522018-01-25 13:33:07 +00002844 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08002845 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07002846}
2847
// Emits the checkcast test, specialized on the statically known kind of check.
// The fast paths compare class pointers inline; failures (or checks that cannot be
// resolved inline) branch to TypeCheckSlowPathMIPS64, which calls into the runtime.
void InstructionCodeGeneratorMIPS64::VisitCheckCast(HCheckCast* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
  Location temp_loc = locations->GetTemp(0);
  GpuRegister temp = temp_loc.AsRegister<GpuRegister>();
  const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
  DCHECK_LE(num_temps, 2u);
  // The second temp exists only when the read barrier configuration requires it.
  Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
  const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
  const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
  const uint32_t object_array_data_offset =
      mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
  Mips64Label done;

  bool is_type_check_slow_path_fatal = CodeGenerator::IsTypeCheckSlowPathFatal(instruction);
  SlowPathCodeMIPS64* slow_path =
      new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
          instruction, is_type_check_slow_path_fatal);
  codegen_->AddSlowPath(slow_path);

  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Beqzc(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kArrayCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Jump to slow path for throwing the exception or doing a
      // more involved array check.
      __ Bnec(temp, cls, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      Mips64Label loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, compare the classes.
      __ Bnec(temp, cls, &loop);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Walk over the class hierarchy to find a match.
      Mips64Label loop;
      __ Bind(&loop);
      __ Beqc(temp, cls, &done);
      // /* HeapReference<Class> */ temp = temp->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       super_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the class reference currently in `temp` is null, jump to the slow path to throw the
      // exception. Otherwise, jump to the beginning of the loop.
      __ Bnezc(temp, &loop);
      __ Bc(slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Do an exact check.
      __ Beqc(temp, cls, &done);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ temp = temp->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       temp_loc,
                                       component_offset,
                                       maybe_temp2_loc,
                                       kWithoutReadBarrier);
      // If the component type is null, jump to the slow path to throw the exception.
      __ Beqzc(temp, slow_path->GetEntryLabel());
      // Otherwise, the object is indeed an array, further check that this component
      // type is not a primitive type.
      __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Bnezc(temp, slow_path->GetEntryLabel());
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
      // We always go into the type check slow path for the unresolved check case.
      // We cannot directly call the CheckCast runtime entry point
      // without resorting to a type checking slow path here (i.e. by
      // calling InvokeRuntime directly), as it would require to
      // assign fixed registers for the inputs of this HInstanceOf
      // instruction (following the runtime calling convention), which
      // might be cluttered by the potential first read barrier
      // emission at the beginning of this method.
      __ Bc(slow_path->GetEntryLabel());
      break;

    case TypeCheckKind::kInterfaceCheck: {
      // Avoid read barriers to improve performance of the fast path. We can not get false
      // positives by doing this.
      // /* HeapReference<Class> */ temp = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // /* HeapReference<Class> */ temp = temp->iftable_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        temp_loc,
                                        temp_loc,
                                        iftable_offset,
                                        maybe_temp2_loc,
                                        kWithoutReadBarrier);
      // Iftable is never null.
      __ Lw(TMP, temp, array_length_offset);
      // Loop through the iftable and check if any class matches.
      Mips64Label loop;
      __ Bind(&loop);
      // TMP holds the number of remaining iftable entries; zero means no match was found.
      __ Beqzc(TMP, slow_path->GetEntryLabel());
      __ Lwu(AT, temp, object_array_data_offset);
      __ MaybeUnpoisonHeapReference(AT);
      // Go to next interface.
      __ Daddiu(temp, temp, 2 * kHeapReferenceSize);
      __ Addiu(TMP, TMP, -2);
      // Compare the classes and continue the loop if they do not match.
      __ Bnec(AT, cls, &loop);
      break;
    }
  }

  __ Bind(&done);
  __ Bind(slow_path->GetExitLabel());
}
3023
3024void LocationsBuilderMIPS64::VisitClinitCheck(HClinitCheck* check) {
3025 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003026 new (GetGraph()->GetAllocator()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003027 locations->SetInAt(0, Location::RequiresRegister());
3028 if (check->HasUses()) {
3029 locations->SetOut(Location::SameAsFirstInput());
3030 }
3031}
3032
3033void InstructionCodeGeneratorMIPS64::VisitClinitCheck(HClinitCheck* check) {
3034 // We assume the class is not null.
Vladimir Marko174b2e22017-10-12 13:34:49 +01003035 SlowPathCodeMIPS64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(
Alexey Frunze4dda3372015-06-01 18:31:49 -07003036 check->GetLoadClass(),
3037 check,
3038 check->GetDexPc(),
3039 true);
3040 codegen_->AddSlowPath(slow_path);
3041 GenerateClassInitializationCheck(slow_path,
3042 check->GetLocations()->InAt(0).AsRegister<GpuRegister>());
3043}
3044
3045void LocationsBuilderMIPS64::VisitCompare(HCompare* compare) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003046 DataType::Type in_type = compare->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003047
Vladimir Markoca6fff82017-10-03 14:49:14 +01003048 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(compare);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003049
3050 switch (in_type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003051 case DataType::Type::kBool:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003052 case DataType::Type::kUint8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003053 case DataType::Type::kInt8:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003054 case DataType::Type::kUint16:
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01003055 case DataType::Type::kInt16:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003056 case DataType::Type::kInt32:
3057 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003058 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c75ffa2015-09-24 14:41:59 -07003059 locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003060 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3061 break;
3062
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003063 case DataType::Type::kFloat32:
3064 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003065 locations->SetInAt(0, Location::RequiresFpuRegister());
3066 locations->SetInAt(1, Location::RequiresFpuRegister());
3067 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003068 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003069
3070 default:
3071 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3072 }
3073}
3074
// Emits the three-way comparison for HCompare into the output register.
void InstructionCodeGeneratorMIPS64::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister res = locations->Out().AsRegister<GpuRegister>();
  DataType::Type in_type = instruction->InputAt(0)->GetType();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
    case DataType::Type::kInt32:
    case DataType::Type::kInt64: {
      GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      GpuRegister rhs = ZERO;
      if (use_imm) {
        // Only a non-zero constant needs to be materialized (into AT);
        // a zero constant compares against the ZERO register directly.
        if (in_type == DataType::Type::kInt64) {
          int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst64(rhs, value);
          }
        } else {
          int32_t value = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant()->AsConstant());
          if (value != 0) {
            rhs = AT;
            __ LoadConst32(rhs, value);
          }
        }
      } else {
        rhs = rhs_location.AsRegister<GpuRegister>();
      }
      // res = (rhs < lhs) - (lhs < rhs), which yields -1, 0 or 1.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }

    case DataType::Type::kFloat32: {
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      // Equal operands (ordered equality only) yield 0.
      __ CmpEqS(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        // Gt bias: an unordered comparison (NaN operand) falls through to 1.
        __ CmpLtS(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        // Lt bias: an unordered comparison (NaN operand) falls through to -1.
        __ CmpLtS(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    case DataType::Type::kFloat64: {
      // Same scheme as kFloat32, using the double-precision compares.
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      Mips64Label done;
      __ CmpEqD(FTMP, lhs, rhs);
      __ LoadConst32(res, 0);
      __ Bc1nez(FTMP, &done);
      if (instruction->IsGtBias()) {
        __ CmpLtD(FTMP, lhs, rhs);
        __ LoadConst32(res, -1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, 1);
      } else {
        __ CmpLtD(FTMP, rhs, lhs);
        __ LoadConst32(res, 1);
        __ Bc1nez(FTMP, &done);
        __ LoadConst32(res, -1);
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3166
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003167void LocationsBuilderMIPS64::HandleCondition(HCondition* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01003168 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze299a9392015-12-08 16:08:02 -08003169 switch (instruction->InputAt(0)->GetType()) {
3170 default:
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003171 case DataType::Type::kInt64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003172 locations->SetInAt(0, Location::RequiresRegister());
3173 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3174 break;
3175
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003176 case DataType::Type::kFloat32:
3177 case DataType::Type::kFloat64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003178 locations->SetInAt(0, Location::RequiresFpuRegister());
3179 locations->SetInAt(1, Location::RequiresFpuRegister());
3180 break;
3181 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003182 if (!instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003183 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3184 }
3185}
3186
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003187void InstructionCodeGeneratorMIPS64::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003188 if (instruction->IsEmittedAtUseSite()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003189 return;
3190 }
3191
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003192 DataType::Type type = instruction->InputAt(0)->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003193 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze299a9392015-12-08 16:08:02 -08003194 switch (type) {
3195 default:
3196 // Integer case.
3197 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ false, locations);
3198 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003199 case DataType::Type::kInt64:
Alexey Frunze299a9392015-12-08 16:08:02 -08003200 GenerateIntLongCompare(instruction->GetCondition(), /* is64bit */ true, locations);
3201 return;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003202 case DataType::Type::kFloat32:
3203 case DataType::Type::kFloat64:
Tijana Jakovljevic43758192016-12-30 09:23:01 +01003204 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3205 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003206 }
3207}
3208
Alexey Frunzec857c742015-09-23 15:12:39 -07003209void InstructionCodeGeneratorMIPS64::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3210 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003211 DataType::Type type = instruction->GetResultType();
Alexey Frunzec857c742015-09-23 15:12:39 -07003212
3213 LocationSummary* locations = instruction->GetLocations();
3214 Location second = locations->InAt(1);
3215 DCHECK(second.IsConstant());
3216
3217 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3218 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3219 int64_t imm = Int64FromConstant(second.GetConstant());
3220 DCHECK(imm == 1 || imm == -1);
3221
3222 if (instruction->IsRem()) {
3223 __ Move(out, ZERO);
3224 } else {
3225 if (imm == -1) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003226 if (type == DataType::Type::kInt32) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003227 __ Subu(out, ZERO, dividend);
3228 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003229 DCHECK_EQ(type, DataType::Type::kInt64);
Alexey Frunzec857c742015-09-23 15:12:39 -07003230 __ Dsubu(out, ZERO, dividend);
3231 }
3232 } else if (out != dividend) {
3233 __ Move(out, dividend);
3234 }
3235 }
3236}
3237
// Emits code for division/remainder by a power-of-two constant divisor
// (|imm| == 2^ctz_imm with |imm| >= 2). Shifts replace the divide; a bias
// derived from the dividend's sign bit (2^ctz_imm - 1 for negative values,
// 0 otherwise) is added first so the result rounds towards zero.
void InstructionCodeGeneratorMIPS64::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DataType::Type type = instruction->GetResultType();

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());
  uint64_t abs_imm = static_cast<uint64_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Srl(TMP, dividend, 31);
      } else {
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
      }
      // TMP now holds the rounding bias; shift to form the quotient.
      __ Addu(out, dividend, TMP);
      __ Sra(out, out, ctz_imm);
      if (imm < 0) {
        // Negative divisor: negate the quotient.
        __ Subu(out, ZERO, out);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for division by +/-2, which is very common.
        __ Dsrl32(TMP, dividend, 31);
      } else {
        __ Dsra32(TMP, dividend, 31);
        // 64-bit shift amounts >= 32 need the Dsrl/Dsrl32 split.
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
      }
      __ Daddu(out, dividend, TMP);
      if (ctz_imm < 32) {
        __ Dsra(out, out, ctz_imm);
      } else {
        __ Dsra32(out, out, ctz_imm - 32);
      }
      if (imm < 0) {
        __ Dsubu(out, ZERO, out);
      }
    }
  } else {
    if (type == DataType::Type::kInt32) {
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Sra(TMP, dividend, 31);
        __ Subu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Addu(out, out, TMP);
      } else {
        __ Sra(TMP, dividend, 31);
        __ Srl(TMP, TMP, 32 - ctz_imm);
        __ Addu(out, dividend, TMP);
        // Zero bits [ctz_imm, 32), keeping only the low remainder bits,
        // then undo the bias.
        __ Ins(out, ZERO, ctz_imm, 32 - ctz_imm);
        __ Subu(out, out, TMP);
      }
    } else {
      DCHECK_EQ(type, DataType::Type::kInt64);
      if (ctz_imm == 1) {
        // Fast path for modulo +/-2, which is very common.
        __ Dsra32(TMP, dividend, 31);
        __ Dsubu(out, dividend, TMP);
        __ Andi(out, out, 1);
        __ Daddu(out, out, TMP);
      } else {
        __ Dsra32(TMP, dividend, 31);
        if (ctz_imm > 32) {
          __ Dsrl(TMP, TMP, 64 - ctz_imm);
        } else {
          __ Dsrl32(TMP, TMP, 32 - ctz_imm);
        }
        __ Daddu(out, dividend, TMP);
        // Zero bits [ctz_imm, 64), then undo the bias.
        __ DblIns(out, ZERO, ctz_imm, 64 - ctz_imm);
        __ Dsubu(out, out, TMP);
      }
    }
  }
}
3326
// Emits code for division/remainder by an arbitrary constant using the
// multiply-by-magic-number technique: the quotient is obtained from the high
// half of dividend * magic, a sign-dependent fixup, an arithmetic shift, and
// a final +1 correction for negative intermediates. The remainder is then
// computed as dividend - quotient * imm.
void InstructionCodeGeneratorMIPS64::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  GpuRegister out = locations->Out().AsRegister<GpuRegister>();
  GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
  int64_t imm = Int64FromConstant(second.GetConstant());

  DataType::Type type = instruction->GetResultType();
  DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm,
                                  (type == DataType::Type::kInt64),
                                  &magic,
                                  &shift);

  if (type == DataType::Type::kInt32) {
    __ LoadConst32(TMP, magic);
    // TMP = high 32 bits of dividend * magic.
    __ MuhR6(TMP, dividend, TMP);

    // Fixup when the signs of imm and magic disagree.
    if (imm > 0 && magic < 0) {
      __ Addu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Subu(TMP, TMP, dividend);
    }

    if (shift != 0) {
      __ Sra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      // out = TMP - (TMP >> 31): adds 1 when the intermediate is negative.
      __ Sra(out, TMP, 31);
      __ Subu(out, TMP, out);
    } else {
      // Finish the quotient in AT, then out = dividend - quotient * imm.
      __ Sra(AT, TMP, 31);
      __ Subu(AT, TMP, AT);
      __ LoadConst32(TMP, imm);
      __ MulR6(TMP, AT, TMP);
      __ Subu(out, dividend, TMP);
    }
  } else {
    __ LoadConst64(TMP, magic);
    // TMP = high 64 bits of dividend * magic.
    __ Dmuh(TMP, dividend, TMP);

    if (imm > 0 && magic < 0) {
      __ Daddu(TMP, TMP, dividend);
    } else if (imm < 0 && magic > 0) {
      __ Dsubu(TMP, TMP, dividend);
    }

    // Shift amounts >= 32 need the Dsra32 form.
    if (shift >= 32) {
      __ Dsra32(TMP, TMP, shift - 32);
    } else if (shift > 0) {
      __ Dsra(TMP, TMP, shift);
    }

    if (instruction->IsDiv()) {
      __ Dsra32(out, TMP, 31);
      __ Dsubu(out, TMP, out);
    } else {
      __ Dsra32(AT, TMP, 31);
      __ Dsubu(AT, TMP, AT);
      __ LoadConst64(TMP, imm);
      __ Dmul(TMP, AT, TMP);
      __ Dsubu(out, dividend, TMP);
    }
  }
}
3400
3401void InstructionCodeGeneratorMIPS64::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3402 DCHECK(instruction->IsDiv() || instruction->IsRem());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003403 DataType::Type type = instruction->GetResultType();
3404 DCHECK(type == DataType::Type::kInt32 || type == DataType::Type::kInt64) << type;
Alexey Frunzec857c742015-09-23 15:12:39 -07003405
3406 LocationSummary* locations = instruction->GetLocations();
3407 GpuRegister out = locations->Out().AsRegister<GpuRegister>();
3408 Location second = locations->InAt(1);
3409
3410 if (second.IsConstant()) {
3411 int64_t imm = Int64FromConstant(second.GetConstant());
3412 if (imm == 0) {
3413 // Do not generate anything. DivZeroCheck would prevent any code to be executed.
3414 } else if (imm == 1 || imm == -1) {
3415 DivRemOneOrMinusOne(instruction);
Nicolas Geoffray68f62892016-01-04 08:39:49 +00003416 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
Alexey Frunzec857c742015-09-23 15:12:39 -07003417 DivRemByPowerOfTwo(instruction);
3418 } else {
3419 DCHECK(imm <= -2 || imm >= 2);
3420 GenerateDivRemWithAnyConstant(instruction);
3421 }
3422 } else {
3423 GpuRegister dividend = locations->InAt(0).AsRegister<GpuRegister>();
3424 GpuRegister divisor = second.AsRegister<GpuRegister>();
3425 if (instruction->IsDiv()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003426 if (type == DataType::Type::kInt32)
Alexey Frunzec857c742015-09-23 15:12:39 -07003427 __ DivR6(out, dividend, divisor);
3428 else
3429 __ Ddiv(out, dividend, divisor);
3430 } else {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003431 if (type == DataType::Type::kInt32)
Alexey Frunzec857c742015-09-23 15:12:39 -07003432 __ ModR6(out, dividend, divisor);
3433 else
3434 __ Dmod(out, dividend, divisor);
3435 }
3436 }
3437}
3438
Alexey Frunze4dda3372015-06-01 18:31:49 -07003439void LocationsBuilderMIPS64::VisitDiv(HDiv* div) {
3440 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003441 new (GetGraph()->GetAllocator()) LocationSummary(div, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003442 switch (div->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003443 case DataType::Type::kInt32:
3444 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003445 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunzec857c742015-09-23 15:12:39 -07003446 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003447 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3448 break;
3449
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003450 case DataType::Type::kFloat32:
3451 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07003452 locations->SetInAt(0, Location::RequiresFpuRegister());
3453 locations->SetInAt(1, Location::RequiresFpuRegister());
3454 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3455 break;
3456
3457 default:
3458 LOG(FATAL) << "Unexpected div type " << div->GetResultType();
3459 }
3460}
3461
// Emits code for HDiv: integral types go through GenerateDivRemIntegral,
// floating-point types use a single DivS/DivD instruction.
void InstructionCodeGeneratorMIPS64::VisitDiv(HDiv* instruction) {
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      GenerateDivRemIntegral(instruction);
      break;
    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
      FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
      FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
      if (type == DataType::Type::kFloat32)
        __ DivS(dst, lhs, rhs);
      else
        __ DivD(dst, lhs, rhs);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}
3486
3487void LocationsBuilderMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003488 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003489 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003490}
3491
3492void InstructionCodeGeneratorMIPS64::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3493 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01003494 new (codegen_->GetScopedAllocator()) DivZeroCheckSlowPathMIPS64(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003495 codegen_->AddSlowPath(slow_path);
3496 Location value = instruction->GetLocations()->InAt(0);
3497
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003498 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07003499
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01003500 if (!DataType::IsIntegralType(type)) {
Nicolas Geoffraye5671612016-03-16 11:03:54 +00003501 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
Serguei Katkov8c0676c2015-08-03 13:55:33 +06003502 return;
Alexey Frunze4dda3372015-06-01 18:31:49 -07003503 }
3504
3505 if (value.IsConstant()) {
3506 int64_t divisor = codegen_->GetInt64ValueOf(value.GetConstant()->AsConstant());
3507 if (divisor == 0) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003508 __ Bc(slow_path->GetEntryLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07003509 } else {
3510 // A division by a non-null constant is valid. We don't need to perform
3511 // any check, so simply fall through.
3512 }
3513 } else {
3514 __ Beqzc(value.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
3515 }
3516}
3517
3518void LocationsBuilderMIPS64::VisitDoubleConstant(HDoubleConstant* constant) {
3519 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003520 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003521 locations->SetOut(Location::ConstantLocation(constant));
3522}
3523
void InstructionCodeGeneratorMIPS64::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site: no code is emitted for the constant itself.
}
3527
// HExit has no operands and produces no value, hence no location summary.
void LocationsBuilderMIPS64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
3531
void InstructionCodeGeneratorMIPS64::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
  // The exit block emits no code.
}
3534
3535void LocationsBuilderMIPS64::VisitFloatConstant(HFloatConstant* constant) {
3536 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01003537 new (GetGraph()->GetAllocator()) LocationSummary(constant, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07003538 locations->SetOut(Location::ConstantLocation(constant));
3539}
3540
void InstructionCodeGeneratorMIPS64::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site: no code is emitted for the constant itself.
}
3544
David Brazdilfc6a86a2015-06-26 10:33:45 +00003545void InstructionCodeGeneratorMIPS64::HandleGoto(HInstruction* got, HBasicBlock* successor) {
Aart Bika8b8e9b2018-01-09 11:01:02 -08003546 if (successor->IsExitBlock()) {
3547 DCHECK(got->GetPrevious()->AlwaysThrows());
3548 return; // no code needed
3549 }
3550
Alexey Frunze4dda3372015-06-01 18:31:49 -07003551 HBasicBlock* block = got->GetBlock();
3552 HInstruction* previous = got->GetPrevious();
3553 HLoopInformation* info = block->GetLoopInformation();
3554
3555 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07003556 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3557 return;
3558 }
3559 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3560 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3561 }
3562 if (!codegen_->GoesToNextBlock(block, successor)) {
Alexey Frunzea0e87b02015-09-24 22:57:20 -07003563 __ Bc(codegen_->GetLabelOf(successor));
Alexey Frunze4dda3372015-06-01 18:31:49 -07003564 }
3565}
3566
David Brazdilfc6a86a2015-06-26 10:33:45 +00003567void LocationsBuilderMIPS64::VisitGoto(HGoto* got) {
3568 got->SetLocations(nullptr);
3569}
3570
// Delegates to HandleGoto, which decides between a branch and a fall-through.
void InstructionCodeGeneratorMIPS64::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
3574
// HTryBoundary has no operands and produces no value, hence no location summary.
void LocationsBuilderMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
3578
3579void InstructionCodeGeneratorMIPS64::VisitTryBoundary(HTryBoundary* try_boundary) {
3580 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3581 if (!successor->IsExitBlock()) {
3582 HandleGoto(try_boundary, successor);
3583 }
3584}
3585
Alexey Frunze299a9392015-12-08 16:08:02 -08003586void InstructionCodeGeneratorMIPS64::GenerateIntLongCompare(IfCondition cond,
3587 bool is64bit,
3588 LocationSummary* locations) {
3589 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
3590 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
3591 Location rhs_location = locations->InAt(1);
3592 GpuRegister rhs_reg = ZERO;
3593 int64_t rhs_imm = 0;
3594 bool use_imm = rhs_location.IsConstant();
3595 if (use_imm) {
3596 if (is64bit) {
3597 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
3598 } else {
3599 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3600 }
3601 } else {
3602 rhs_reg = rhs_location.AsRegister<GpuRegister>();
3603 }
3604 int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);
3605
3606 switch (cond) {
3607 case kCondEQ:
3608 case kCondNE:
Goran Jakovljevicdb3deee2016-12-28 14:33:21 +01003609 if (use_imm && IsInt<16>(-rhs_imm)) {
3610 if (rhs_imm == 0) {
3611 if (cond == kCondEQ) {
3612 __ Sltiu(dst, lhs, 1);
3613 } else {
3614 __ Sltu(dst, ZERO, lhs);
3615 }
3616 } else {
3617 if (is64bit) {
3618 __ Daddiu(dst, lhs, -rhs_imm);
3619 } else {
3620 __ Addiu(dst, lhs, -rhs_imm);
3621 }
3622 if (cond == kCondEQ) {
3623 __ Sltiu(dst, dst, 1);
3624 } else {
3625 __ Sltu(dst, ZERO, dst);
3626 }
Alexey Frunze299a9392015-12-08 16:08:02 -08003627 }
Alexey Frunze299a9392015-12-08 16:08:02 -08003628 } else {
Goran Jakovljevicdb3deee2016-12-28 14:33:21 +01003629 if (use_imm && IsUint<16>(rhs_imm)) {
3630 __ Xori(dst, lhs, rhs_imm);
3631 } else {
3632 if (use_imm) {
3633 rhs_reg = TMP;
3634 __ LoadConst64(rhs_reg, rhs_imm);
3635 }
3636 __ Xor(dst, lhs, rhs_reg);
3637 }
3638 if (cond == kCondEQ) {
3639 __ Sltiu(dst, dst, 1);
3640 } else {
3641 __ Sltu(dst, ZERO, dst);
3642 }
Alexey Frunze299a9392015-12-08 16:08:02 -08003643 }
3644 break;
3645
3646 case kCondLT:
3647 case kCondGE:
3648 if (use_imm && IsInt<16>(rhs_imm)) {
3649 __ Slti(dst, lhs, rhs_imm);
3650 } else {
3651 if (use_imm) {
3652 rhs_reg = TMP;
3653 __ LoadConst64(rhs_reg, rhs_imm);
3654 }
3655 __ Slt(dst, lhs, rhs_reg);
3656 }
3657 if (cond == kCondGE) {
3658 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3659 // only the slt instruction but no sge.
3660 __ Xori(dst, dst, 1);
3661 }
3662 break;
3663
3664 case kCondLE:
3665 case kCondGT:
3666 if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
3667 // Simulate lhs <= rhs via lhs < rhs + 1.
3668 __ Slti(dst, lhs, rhs_imm_plus_one);
3669 if (cond == kCondGT) {
3670 // Simulate lhs > rhs via !(lhs <= rhs) since there's
3671 // only the slti instruction but no sgti.
3672 __ Xori(dst, dst, 1);
3673 }
3674 } else {
3675 if (use_imm) {
3676 rhs_reg = TMP;
3677 __ LoadConst64(rhs_reg, rhs_imm);
3678 }
3679 __ Slt(dst, rhs_reg, lhs);
3680 if (cond == kCondLE) {
3681 // Simulate lhs <= rhs via !(rhs < lhs) since there's
3682 // only the slt instruction but no sle.
3683 __ Xori(dst, dst, 1);
3684 }
3685 }
3686 break;
3687
3688 case kCondB:
3689 case kCondAE:
3690 if (use_imm && IsInt<16>(rhs_imm)) {
3691 // Sltiu sign-extends its 16-bit immediate operand before
3692 // the comparison and thus lets us compare directly with
3693 // unsigned values in the ranges [0, 0x7fff] and
3694 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3695 __ Sltiu(dst, lhs, rhs_imm);
3696 } else {
3697 if (use_imm) {
3698 rhs_reg = TMP;
3699 __ LoadConst64(rhs_reg, rhs_imm);
3700 }
3701 __ Sltu(dst, lhs, rhs_reg);
3702 }
3703 if (cond == kCondAE) {
3704 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3705 // only the sltu instruction but no sgeu.
3706 __ Xori(dst, dst, 1);
3707 }
3708 break;
3709
3710 case kCondBE:
3711 case kCondA:
3712 if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
3713 // Simulate lhs <= rhs via lhs < rhs + 1.
3714 // Note that this only works if rhs + 1 does not overflow
3715 // to 0, hence the check above.
3716 // Sltiu sign-extends its 16-bit immediate operand before
3717 // the comparison and thus lets us compare directly with
3718 // unsigned values in the ranges [0, 0x7fff] and
3719 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3720 __ Sltiu(dst, lhs, rhs_imm_plus_one);
3721 if (cond == kCondA) {
3722 // Simulate lhs > rhs via !(lhs <= rhs) since there's
3723 // only the sltiu instruction but no sgtiu.
3724 __ Xori(dst, dst, 1);
3725 }
3726 } else {
3727 if (use_imm) {
3728 rhs_reg = TMP;
3729 __ LoadConst64(rhs_reg, rhs_imm);
3730 }
3731 __ Sltu(dst, rhs_reg, lhs);
3732 if (cond == kCondBE) {
3733 // Simulate lhs <= rhs via !(rhs < lhs) since there's
3734 // only the sltu instruction but no sleu.
3735 __ Xori(dst, dst, 1);
3736 }
3737 }
3738 break;
3739 }
3740}
3741
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02003742bool InstructionCodeGeneratorMIPS64::MaterializeIntLongCompare(IfCondition cond,
3743 bool is64bit,
3744 LocationSummary* input_locations,
3745 GpuRegister dst) {
3746 GpuRegister lhs = input_locations->InAt(0).AsRegister<GpuRegister>();
3747 Location rhs_location = input_locations->InAt(1);
3748 GpuRegister rhs_reg = ZERO;
3749 int64_t rhs_imm = 0;
3750 bool use_imm = rhs_location.IsConstant();
3751 if (use_imm) {
3752 if (is64bit) {
3753 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
3754 } else {
3755 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3756 }
3757 } else {
3758 rhs_reg = rhs_location.AsRegister<GpuRegister>();
3759 }
3760 int64_t rhs_imm_plus_one = rhs_imm + UINT64_C(1);
3761
3762 switch (cond) {
3763 case kCondEQ:
3764 case kCondNE:
3765 if (use_imm && IsInt<16>(-rhs_imm)) {
3766 if (is64bit) {
3767 __ Daddiu(dst, lhs, -rhs_imm);
3768 } else {
3769 __ Addiu(dst, lhs, -rhs_imm);
3770 }
3771 } else if (use_imm && IsUint<16>(rhs_imm)) {
3772 __ Xori(dst, lhs, rhs_imm);
3773 } else {
3774 if (use_imm) {
3775 rhs_reg = TMP;
3776 __ LoadConst64(rhs_reg, rhs_imm);
3777 }
3778 __ Xor(dst, lhs, rhs_reg);
3779 }
3780 return (cond == kCondEQ);
3781
3782 case kCondLT:
3783 case kCondGE:
3784 if (use_imm && IsInt<16>(rhs_imm)) {
3785 __ Slti(dst, lhs, rhs_imm);
3786 } else {
3787 if (use_imm) {
3788 rhs_reg = TMP;
3789 __ LoadConst64(rhs_reg, rhs_imm);
3790 }
3791 __ Slt(dst, lhs, rhs_reg);
3792 }
3793 return (cond == kCondGE);
3794
3795 case kCondLE:
3796 case kCondGT:
3797 if (use_imm && IsInt<16>(rhs_imm_plus_one)) {
3798 // Simulate lhs <= rhs via lhs < rhs + 1.
3799 __ Slti(dst, lhs, rhs_imm_plus_one);
3800 return (cond == kCondGT);
3801 } else {
3802 if (use_imm) {
3803 rhs_reg = TMP;
3804 __ LoadConst64(rhs_reg, rhs_imm);
3805 }
3806 __ Slt(dst, rhs_reg, lhs);
3807 return (cond == kCondLE);
3808 }
3809
3810 case kCondB:
3811 case kCondAE:
3812 if (use_imm && IsInt<16>(rhs_imm)) {
3813 // Sltiu sign-extends its 16-bit immediate operand before
3814 // the comparison and thus lets us compare directly with
3815 // unsigned values in the ranges [0, 0x7fff] and
3816 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3817 __ Sltiu(dst, lhs, rhs_imm);
3818 } else {
3819 if (use_imm) {
3820 rhs_reg = TMP;
3821 __ LoadConst64(rhs_reg, rhs_imm);
3822 }
3823 __ Sltu(dst, lhs, rhs_reg);
3824 }
3825 return (cond == kCondAE);
3826
3827 case kCondBE:
3828 case kCondA:
3829 if (use_imm && (rhs_imm_plus_one != 0) && IsInt<16>(rhs_imm_plus_one)) {
3830 // Simulate lhs <= rhs via lhs < rhs + 1.
3831 // Note that this only works if rhs + 1 does not overflow
3832 // to 0, hence the check above.
3833 // Sltiu sign-extends its 16-bit immediate operand before
3834 // the comparison and thus lets us compare directly with
3835 // unsigned values in the ranges [0, 0x7fff] and
3836 // [0x[ffffffff]ffff8000, 0x[ffffffff]ffffffff].
3837 __ Sltiu(dst, lhs, rhs_imm_plus_one);
3838 return (cond == kCondA);
3839 } else {
3840 if (use_imm) {
3841 rhs_reg = TMP;
3842 __ LoadConst64(rhs_reg, rhs_imm);
3843 }
3844 __ Sltu(dst, rhs_reg, lhs);
3845 return (cond == kCondBE);
3846 }
3847 }
3848}
3849
Alexey Frunze299a9392015-12-08 16:08:02 -08003850void InstructionCodeGeneratorMIPS64::GenerateIntLongCompareAndBranch(IfCondition cond,
3851 bool is64bit,
3852 LocationSummary* locations,
3853 Mips64Label* label) {
3854 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
3855 Location rhs_location = locations->InAt(1);
3856 GpuRegister rhs_reg = ZERO;
3857 int64_t rhs_imm = 0;
3858 bool use_imm = rhs_location.IsConstant();
3859 if (use_imm) {
3860 if (is64bit) {
3861 rhs_imm = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant());
3862 } else {
3863 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3864 }
3865 } else {
3866 rhs_reg = rhs_location.AsRegister<GpuRegister>();
3867 }
3868
3869 if (use_imm && rhs_imm == 0) {
3870 switch (cond) {
3871 case kCondEQ:
3872 case kCondBE: // <= 0 if zero
3873 __ Beqzc(lhs, label);
3874 break;
3875 case kCondNE:
3876 case kCondA: // > 0 if non-zero
3877 __ Bnezc(lhs, label);
3878 break;
3879 case kCondLT:
3880 __ Bltzc(lhs, label);
3881 break;
3882 case kCondGE:
3883 __ Bgezc(lhs, label);
3884 break;
3885 case kCondLE:
3886 __ Blezc(lhs, label);
3887 break;
3888 case kCondGT:
3889 __ Bgtzc(lhs, label);
3890 break;
3891 case kCondB: // always false
3892 break;
3893 case kCondAE: // always true
3894 __ Bc(label);
3895 break;
3896 }
3897 } else {
3898 if (use_imm) {
3899 rhs_reg = TMP;
3900 __ LoadConst64(rhs_reg, rhs_imm);
3901 }
3902 switch (cond) {
3903 case kCondEQ:
3904 __ Beqc(lhs, rhs_reg, label);
3905 break;
3906 case kCondNE:
3907 __ Bnec(lhs, rhs_reg, label);
3908 break;
3909 case kCondLT:
3910 __ Bltc(lhs, rhs_reg, label);
3911 break;
3912 case kCondGE:
3913 __ Bgec(lhs, rhs_reg, label);
3914 break;
3915 case kCondLE:
3916 __ Bgec(rhs_reg, lhs, label);
3917 break;
3918 case kCondGT:
3919 __ Bltc(rhs_reg, lhs, label);
3920 break;
3921 case kCondB:
3922 __ Bltuc(lhs, rhs_reg, label);
3923 break;
3924 case kCondAE:
3925 __ Bgeuc(lhs, rhs_reg, label);
3926 break;
3927 case kCondBE:
3928 __ Bgeuc(rhs_reg, lhs, label);
3929 break;
3930 case kCondA:
3931 __ Bltuc(rhs_reg, lhs, label);
3932 break;
3933 }
3934 }
3935}
3936
// Materializes the result of the floating-point comparison `lhs <cond> rhs`
// into the core register `dst` as 0 or 1.
//
// `gt_bias` selects the NaN behavior: when true, an unordered comparison
// (either operand is NaN) must behave as if lhs > rhs, so LT/LE use the
// ordered CMP variants (which produce false on NaN) while GT/GE use the
// unordered variants on swapped operands (true on NaN). When `gt_bias` is
// false, NaN biases towards "less than" and the variants are swapped.
void InstructionCodeGeneratorMIPS64::GenerateFpCompare(IfCondition cond,
                                                       bool gt_bias,
                                                       DataType::Type type,
                                                       LocationSummary* locations) {
  GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // The compare produces all ones (true) or all zeros; keep bit 0.
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // Negate the equality result: -1 + 1 == 0 (equal), 0 + 1 == 1 (unequal).
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        // lhs > rhs is computed as rhs < lhs with swapped operands.
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        // lhs >= rhs is computed as rhs <= lhs with swapped operands.
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    // Same logic with the double-precision compare instructions.
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondNE:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Mfc1(dst, FTMP);
        // Negate the equality result: -1 + 1 == 0 (equal), 0 + 1 == 1 (unequal).
        __ Addiu(dst, dst, 1);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Mfc1(dst, FTMP);
        __ Andi(dst, dst, 1);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
4051
// Materializes the floating-point comparison `lhs <cond> rhs` into the FPU
// register `dst` (all ones when the emitted compare is true, all zeros
// otherwise).
//
// Returns true when `dst` holds the *negation* of the requested condition
// (only kCondNE, which is emitted as an equality compare) so the caller
// must invert its use of `dst`; returns false otherwise. `gt_bias` selects
// the NaN behavior exactly as in GenerateFpCompare().
bool InstructionCodeGeneratorMIPS64::MaterializeFpCompare(IfCondition cond,
                                                          bool gt_bias,
                                                          DataType::Type type,
                                                          LocationSummary* input_locations,
                                                          FpuRegister dst) {
  FpuRegister lhs = input_locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = input_locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(dst, lhs, rhs);
        return false;
      case kCondNE:
        // No direct "not equal" compare; emit EQ and report inversion.
        __ CmpEqS(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(dst, lhs, rhs);
        } else {
          __ CmpUltS(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(dst, lhs, rhs);
        } else {
          __ CmpUleS(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        // lhs > rhs is computed as rhs < lhs with swapped operands.
        if (gt_bias) {
          __ CmpUltS(dst, rhs, lhs);
        } else {
          __ CmpLtS(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        // lhs >= rhs is computed as rhs <= lhs with swapped operands.
        if (gt_bias) {
          __ CmpUleS(dst, rhs, lhs);
        } else {
          __ CmpLeS(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  } else {
    // Same logic with the double-precision compare instructions.
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(dst, lhs, rhs);
        return false;
      case kCondNE:
        // No direct "not equal" compare; emit EQ and report inversion.
        __ CmpEqD(dst, lhs, rhs);
        return true;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(dst, lhs, rhs);
        } else {
          __ CmpUltD(dst, lhs, rhs);
        }
        return false;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(dst, lhs, rhs);
        } else {
          __ CmpUleD(dst, lhs, rhs);
        }
        return false;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(dst, rhs, lhs);
        } else {
          __ CmpLtD(dst, rhs, lhs);
        }
        return false;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(dst, rhs, lhs);
        } else {
          __ CmpLeD(dst, rhs, lhs);
        }
        return false;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
        UNREACHABLE();
    }
  }
}
4142
// Generates a floating-point compare-and-branch: branches to `label` when
// `lhs <cond> rhs` holds. The comparison result is produced in FTMP and
// consumed with Bc1nez (branch if set); kCondNE is emitted as an equality
// compare followed by Bc1eqz (branch if clear). `gt_bias` selects the NaN
// behavior exactly as in GenerateFpCompare().
void InstructionCodeGeneratorMIPS64::GenerateFpCompareAndBranch(IfCondition cond,
                                                                bool gt_bias,
                                                                DataType::Type type,
                                                                LocationSummary* locations,
                                                                Mips64Label* label) {
  FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
  FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
  if (type == DataType::Type::kFloat32) {
    switch (cond) {
      case kCondEQ:
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        // Branch on the equality compare being clear.
        __ CmpEqS(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtS(FTMP, lhs, rhs);
        } else {
          __ CmpUltS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeS(FTMP, lhs, rhs);
        } else {
          __ CmpUleS(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        // lhs > rhs is computed as rhs < lhs with swapped operands.
        if (gt_bias) {
          __ CmpUltS(FTMP, rhs, lhs);
        } else {
          __ CmpLtS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        // lhs >= rhs is computed as rhs <= lhs with swapped operands.
        if (gt_bias) {
          __ CmpUleS(FTMP, rhs, lhs);
        } else {
          __ CmpLeS(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  } else {
    // Same logic with the double-precision compare instructions.
    DCHECK_EQ(type, DataType::Type::kFloat64);
    switch (cond) {
      case kCondEQ:
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1nez(FTMP, label);
        break;
      case kCondNE:
        // Branch on the equality compare being clear.
        __ CmpEqD(FTMP, lhs, rhs);
        __ Bc1eqz(FTMP, label);
        break;
      case kCondLT:
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
        } else {
          __ CmpUltD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondLE:
        if (gt_bias) {
          __ CmpLeD(FTMP, lhs, rhs);
        } else {
          __ CmpUleD(FTMP, lhs, rhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGT:
        if (gt_bias) {
          __ CmpUltD(FTMP, rhs, lhs);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      case kCondGE:
        if (gt_bias) {
          __ CmpUleD(FTMP, rhs, lhs);
        } else {
          __ CmpLeD(FTMP, rhs, lhs);
        }
        __ Bc1nez(FTMP, label);
        break;
      default:
        LOG(FATAL) << "Unexpected non-floating-point condition";
        UNREACHABLE();
    }
  }
}
4245
// Emits control flow based on `instruction`'s condition input at
// `condition_input_index` (used by HIf, HDeoptimize and the HSelect
// fallback). A null `true_target` (resp. `false_target`) means that
// successor is the fallthrough and needs no branch. Constant conditions
// are resolved statically to at most one unconditional branch.
void InstructionCodeGeneratorMIPS64::GenerateTestAndBranch(HInstruction* instruction,
                                                           size_t condition_input_index,
                                                           Mips64Label* true_target,
                                                           Mips64Label* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ Bc(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ Bc(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //   (1) true_target == nullptr && false_target != nullptr
  //         - opposite condition true => branch to false_target
  //   (2) true_target != nullptr && false_target == nullptr
  //         - condition true => branch to true_target
  //   (3) true_target != nullptr && false_target != nullptr
  //         - condition true => branch to true_target
  //         - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqzc(cond_val.AsRegister<GpuRegister>(), false_target);
    } else {
      __ Bnezc(cond_val.AsRegister<GpuRegister>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    DataType::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    Mips64Label* branch_target = true_target;

    // Pattern (1): branch to false_target on the opposite condition.
    if (true_target == nullptr) {
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ false, locations, branch_target);
        break;
      case DataType::Type::kInt64:
        GenerateIntLongCompareAndBranch(if_cond, /* is64bit */ true, locations, branch_target);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ Bc(false_target);
  }
}
4321
4322void LocationsBuilderMIPS64::VisitIf(HIf* if_instr) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004323 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00004324 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004325 locations->SetInAt(0, Location::RequiresRegister());
4326 }
4327}
4328
4329void InstructionCodeGeneratorMIPS64::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00004330 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
4331 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004332 Mips64Label* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004333 nullptr : codegen_->GetLabelOf(true_successor);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07004334 Mips64Label* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
David Brazdil0debae72015-11-12 18:37:00 +00004335 nullptr : codegen_->GetLabelOf(false_successor);
4336 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004337}
4338
4339void LocationsBuilderMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004340 LocationSummary* locations = new (GetGraph()->GetAllocator())
Alexey Frunze4dda3372015-06-01 18:31:49 -07004341 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01004342 InvokeRuntimeCallingConvention calling_convention;
4343 RegisterSet caller_saves = RegisterSet::Empty();
4344 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
4345 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00004346 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07004347 locations->SetInAt(0, Location::RequiresRegister());
4348 }
4349}
4350
4351void InstructionCodeGeneratorMIPS64::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08004352 SlowPathCodeMIPS64* slow_path =
4353 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS64>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00004354 GenerateTestAndBranch(deoptimize,
4355 /* condition_input_index */ 0,
4356 slow_path->GetEntryLabel(),
4357 /* false_target */ nullptr);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004358}
4359
// This function returns true if a conditional move can be generated for HSelect.
// Otherwise it returns false and HSelect must be implemented in terms of conditional
// branches and regular moves.
//
// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
//
// While determining feasibility of a conditional move and setting inputs/outputs
// are two distinct tasks, this function does both because they share quite a bit
// of common logic.
static bool CanMoveConditionally(HSelect* select, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  HCondition* condition = cond->AsCondition();

  DataType::Type cond_type =
      materialized ? DataType::Type::kInt32 : condition->InputAt(0)->GetType();
  DataType::Type dst_type = select->GetType();

  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  // A zero-bit-pattern input can be synthesized by SELEQZ/SELNEZ directly
  // instead of occupying a register.
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  // Constant conditions are left to the branch-based path, which resolves
  // them statically; the comments below sketch the sequences emitted by
  // GenConditionalMove() for each feasible case.
  if (!cond->IsConstant()) {
    if (!DataType::IsFloatingPointType(cond_type)) {
      if (!DataType::IsFloatingPointType(dst_type)) {
        // Moving int/long on int/long condition.
        if (is_true_value_zero_constant) {
          // seleqz out_reg, false_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez out_reg, true_reg, cond_reg
          can_move_conditionally = true;
          use_const_for_false_in = true;
        } else if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          // selnez AT, true_reg, cond_reg
          // seleqz TMP, false_reg, cond_reg
          // or out_reg, AT, TMP
          can_move_conditionally = true;
        }
      } else {
        // Moving float/double on int/long condition.
        if (materialized) {
          // Not materializing unmaterialized int conditions
          // to keep the instruction count low.
          can_move_conditionally = true;
          if (is_true_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // seleqz.fmt out_reg, false_reg, temp_cond_reg
            use_const_for_true_in = true;
          } else if (is_false_value_zero_constant) {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // selnez.fmt out_reg, true_reg, temp_cond_reg
            use_const_for_false_in = true;
          } else {
            // sltu TMP, ZERO, cond_reg
            // mtc1 TMP, temp_cond_reg
            // sel.fmt temp_cond_reg, false_reg, true_reg
            // mov.fmt out_reg, temp_cond_reg
          }
        }
      }
    } else {
      if (!DataType::IsFloatingPointType(dst_type)) {
        // Moving int/long on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // seleqz out_reg, false_reg, TMP
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // mfc1 TMP, temp_cond_reg
          // selnez out_reg, true_reg, TMP
          use_const_for_false_in = true;
        } else {
          // mfc1 TMP, temp_cond_reg
          // selnez AT, true_reg, TMP
          // seleqz TMP, false_reg, TMP
          // or out_reg, AT, TMP
        }
      } else {
        // Moving float/double on float/double condition.
        can_move_conditionally = true;
        if (is_true_value_zero_constant) {
          // seleqz.fmt out_reg, false_reg, temp_cond_reg
          use_const_for_true_in = true;
        } else if (is_false_value_zero_constant) {
          // selnez.fmt out_reg, true_reg, temp_cond_reg
          use_const_for_false_in = true;
        } else {
          // sel.fmt temp_cond_reg, false_reg, true_reg
          // mov.fmt out_reg, temp_cond_reg
        }
      }
    }
  }

  if (can_move_conditionally) {
    // At most one of the two value inputs may be folded into a zero constant.
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                DataType::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                DataType::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }

    if (can_move_conditionally) {
      locations_to_set->SetOut(DataType::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    } else {
      // The branch-based fallback moves the true value over the false value.
      locations_to_set->SetOut(Location::SameAsFirstInput());
    }
  }

  return can_move_conditionally;
}
4507
4508
// Emits the conditional-move sequence for an HSelect previously approved by
// CanMoveConditionally() (see the inline sketches there for the exact
// sequences generated in each case).
void InstructionCodeGeneratorMIPS64::GenConditionalMove(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  GpuRegister cond_reg = TMP;
  FpuRegister fcond_reg = FTMP;
  DataType::Type cond_type = DataType::Type::kInt32;
  bool cond_inverted = false;
  DataType::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // Materialized condition: it already lives in a core register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<GpuRegister>();
  } else {
    // Unmaterialized condition: compute it into cond_reg / fcond_reg.
    // `cond_inverted` records when the materialized value is the negation
    // of the requested condition (see Materialize*Compare()).
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ false,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kInt64:
        cond_inverted = MaterializeIntLongCompare(if_cond,
                                                  /* is64bit */ true,
                                                  cond_locations,
                                                  cond_reg);
        break;
      case DataType::Type::kFloat32:
      case DataType::Type::kFloat64:
        cond_inverted = MaterializeFpCompare(if_cond,
                                             condition->IsGtBias(),
                                             cond_type,
                                             cond_locations,
                                             fcond_reg);
        break;
    }
  }

  // Constant inputs were only allowed by CanMoveConditionally() when they
  // are zero bit patterns.
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      // Integer destination.
      if (DataType::IsFloatingPointType(cond_type)) {
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<GpuRegister>(), false_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<GpuRegister>(), true_src.AsRegister<GpuRegister>(), cond_reg);
        }
      } else {
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<GpuRegister>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<GpuRegister>(), cond_reg);
        }
        __ Or(dst.AsRegister<GpuRegister>(), AT, TMP);
      }
      break;
    case DataType::Type::kFloat32: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case DataType::Type::kFloat64: {
      if (!DataType::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FpuRegister dst_reg = dst.AsFpuRegister<FpuRegister>();
      if (true_src.IsConstant()) {
        FpuRegister src_reg = false_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FpuRegister src_reg = true_src.AsFpuRegister<FpuRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FpuRegister>(),
                  false_src.AsFpuRegister<FpuRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FpuRegister>(),
                  true_src.AsFpuRegister<FpuRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
4660
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004661void LocationsBuilderMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004662 LocationSummary* locations = new (GetGraph()->GetAllocator())
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004663 LocationSummary(flag, LocationSummary::kNoCall);
4664 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07004665}
4666
Goran Jakovljevicc6418422016-12-05 16:31:55 +01004667void InstructionCodeGeneratorMIPS64::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
4668 __ LoadFromOffset(kLoadWord,
4669 flag->GetLocations()->Out().AsRegister<GpuRegister>(),
4670 SP,
4671 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07004672}
4673
David Brazdil74eb1b22015-12-14 11:44:01 +00004674void LocationsBuilderMIPS64::VisitSelect(HSelect* select) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01004675 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(select);
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004676 CanMoveConditionally(select, locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00004677}
4678
4679void InstructionCodeGeneratorMIPS64::VisitSelect(HSelect* select) {
Goran Jakovljevic2dec9272017-08-02 11:41:26 +02004680 if (CanMoveConditionally(select, /* locations_to_set */ nullptr)) {
4681 GenConditionalMove(select);
4682 } else {
4683 LocationSummary* locations = select->GetLocations();
4684 Mips64Label false_target;
4685 GenerateTestAndBranch(select,
4686 /* condition_input_index */ 2,
4687 /* true_target */ nullptr,
4688 &false_target);
4689 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
4690 __ Bind(&false_target);
4691 }
David Brazdil74eb1b22015-12-14 11:44:01 +00004692}
4693
void LocationsBuilderMIPS64::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No inputs or outputs; just allocate an (empty) LocationSummary for the
  // instruction.
  new (GetGraph()->GetAllocator()) LocationSummary(info);
}
4697
void InstructionCodeGeneratorMIPS64::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // Intentionally empty: no code is emitted here.
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
4701
// Emits a single NOP instruction.
void CodeGeneratorMIPS64::GenerateNop() {
  __ Nop();
}
4705
// Builds the LocationSummary for a field get. Reference loads with read
// barriers may call a slow path and, without Baker read barrier thunks,
// need an extra temp register.
void LocationsBuilderMIPS64::HandleFieldGet(HInstruction* instruction,
                                            const FieldInfo& field_info) {
  DataType::Type field_type = field_info.GetFieldType();
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == DataType::Type::kReference);
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
      instruction,
      object_field_get_with_read_barrier
          ? LocationSummary::kCallOnSlowPath
          : LocationSummary::kNoCall);
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  if (DataType::IsFloatingPointType(instruction->GetType())) {
    locations->SetOut(Location::RequiresFpuRegister());
  } else {
    // The output overlaps in the case of an object field get with
    // read barriers enabled: we do not want the move to overwrite the
    // object's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_field_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier.
    if (!kBakerReadBarrierThunksEnableForFields) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
4739
// Generates code for a field get: selects the load width/signedness from the
// field type, performs the load (with a read barrier for reference fields),
// and emits the load-acquire barrier required for volatile fields.
void InstructionCodeGeneratorMIPS64::HandleFieldGet(HInstruction* instruction,
                                                    const FieldInfo& field_info) {
  DCHECK_EQ(DataType::Size(field_info.GetFieldType()), DataType::Size(instruction->GetType()));
  DataType::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  // Records an implicit null check (if any) together with the emitted load.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Pick the load width and signedness matching the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
      load_type = kLoadUnsignedByte;
      break;
    case DataType::Type::kInt8:
      load_type = kLoadSignedByte;
      break;
    case DataType::Type::kUint16:
      load_type = kLoadUnsignedHalfword;
      break;
    case DataType::Type::kInt16:
      load_type = kLoadSignedHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
      load_type = kLoadWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      load_type = kLoadDoubleword;
      break;
    case DataType::Type::kReference:
      // References are 32-bit compressed pointers, zero-extended on load.
      load_type = kLoadUnsignedWord;
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }
  if (!DataType::IsFloatingPointType(type)) {
    DCHECK(dst_loc.IsRegister());
    GpuRegister dst = dst_loc.AsRegister<GpuRegister>();
    if (type == DataType::Type::kReference) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // With Baker thunks enabled no temp was reserved; pass an invalid location.
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadUnsignedWord, dst, obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else {
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    }
  } else {
    DCHECK(dst_loc.IsFpuRegister());
    FpuRegister dst = dst_loc.AsFpuRegister<FpuRegister>();
    __ LoadFpuFromOffset(load_type, dst, obj, offset, null_checker);
  }

  // Memory barriers, in the case of references, are emitted above in the
  // reference load path.
  if (is_volatile && (type != DataType::Type::kReference)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
4828
4829void LocationsBuilderMIPS64::HandleFieldSet(HInstruction* instruction,
4830 const FieldInfo& field_info ATTRIBUTE_UNUSED) {
4831 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01004832 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07004833 locations->SetInAt(0, Location::RequiresRegister());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01004834 if (DataType::IsFloatingPointType(instruction->InputAt(1)->GetType())) {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004835 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004836 } else {
Tijana Jakovljevicba89c342017-03-10 13:36:08 +01004837 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07004838 }
4839}
4840
// Generates code for a field set: selects the store width from the field
// type, stores the value (poisoning references when heap poisoning is on),
// marks the GC card after reference stores, and brackets the store with the
// barriers required for volatile fields.
void InstructionCodeGeneratorMIPS64::HandleFieldSet(HInstruction* instruction,
                                                    const FieldInfo& field_info,
                                                    bool value_can_be_null) {
  DataType::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  GpuRegister obj = locations->InAt(0).AsRegister<GpuRegister>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  // Records an implicit null check (if any) together with the emitted store.
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Pick the store width matching the field type.
  switch (type) {
    case DataType::Type::kBool:
    case DataType::Type::kUint8:
    case DataType::Type::kInt8:
      store_type = kStoreByte;
      break;
    case DataType::Type::kUint16:
    case DataType::Type::kInt16:
      store_type = kStoreHalfword;
      break;
    case DataType::Type::kInt32:
    case DataType::Type::kFloat32:
    case DataType::Type::kReference:
      store_type = kStoreWord;
      break;
    case DataType::Type::kInt64:
    case DataType::Type::kFloat64:
      store_type = kStoreDoubleword;
      break;
    case DataType::Type::kUint32:
    case DataType::Type::kUint64:
    case DataType::Type::kVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  // Volatile store: release-style barrier before the store.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (value_location.IsConstant()) {
    int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
    __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
  } else {
    if (!DataType::IsFloatingPointType(type)) {
      DCHECK(value_location.IsRegister());
      GpuRegister src = value_location.AsRegister<GpuRegister>();
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, DataType::Type::kReference);
        // Poison into TMP so `src` keeps the unpoisoned reference for the card mark.
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      DCHECK(value_location.IsFpuRegister());
      FpuRegister src = value_location.AsFpuRegister<FpuRegister>();
      __ StoreFpuToOffset(store_type, src, obj, offset, null_checker);
    }
  }

  // Reference stores into the heap must dirty the GC card of `obj`.
  if (needs_write_barrier) {
    DCHECK(value_location.IsRegister());
    GpuRegister src = value_location.AsRegister<GpuRegister>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  // Volatile store: full barrier after the store.
  if (is_volatile) {
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
4918
void LocationsBuilderMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Delegates to the shared field-get location builder.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4922
void InstructionCodeGeneratorMIPS64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Delegates to the shared field-get code generator.
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
4926
void LocationsBuilderMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Delegates to the shared field-set location builder.
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
4930
4931void InstructionCodeGeneratorMIPS64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Goran Jakovljevic8ed18262016-01-22 13:01:00 +01004932 HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
Alexey Frunze4dda3372015-06-01 18:31:49 -07004933}
4934
Alexey Frunze15958152017-02-09 19:08:30 -08004935void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadOneRegister(
4936 HInstruction* instruction,
4937 Location out,
4938 uint32_t offset,
4939 Location maybe_temp,
4940 ReadBarrierOption read_barrier_option) {
4941 GpuRegister out_reg = out.AsRegister<GpuRegister>();
4942 if (read_barrier_option == kWithReadBarrier) {
4943 CHECK(kEmitCompilerReadBarrier);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07004944 if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
4945 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
4946 }
Alexey Frunze15958152017-02-09 19:08:30 -08004947 if (kUseBakerReadBarrier) {
4948 // Load with fast path based Baker's read barrier.
4949 // /* HeapReference<Object> */ out = *(out + offset)
4950 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
4951 out,
4952 out_reg,
4953 offset,
4954 maybe_temp,
4955 /* needs_null_check */ false);
4956 } else {
4957 // Load with slow path based read barrier.
4958 // Save the value of `out` into `maybe_temp` before overwriting it
4959 // in the following move operation, as we will need it for the
4960 // read barrier below.
4961 __ Move(maybe_temp.AsRegister<GpuRegister>(), out_reg);
4962 // /* HeapReference<Object> */ out = *(out + offset)
4963 __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
4964 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
4965 }
4966 } else {
4967 // Plain load with no read barrier.
4968 // /* HeapReference<Object> */ out = *(out + offset)
4969 __ LoadFromOffset(kLoadUnsignedWord, out_reg, out_reg, offset);
4970 __ MaybeUnpoisonHeapReference(out_reg);
4971 }
4972}
4973
// Loads the heap reference at `*(obj + offset)` into `out`, leaving `obj`
// intact, applying the requested read barrier option. `maybe_temp` must hold
// a register whenever a non-thunk Baker read barrier needs scratch space.
void InstructionCodeGeneratorMIPS64::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  GpuRegister out_reg = out.AsRegister<GpuRegister>();
  GpuRegister obj_reg = obj.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
5010
Alexey Frunze4147fcc2017-06-17 19:57:27 -07005011static inline int GetBakerMarkThunkNumber(GpuRegister reg) {
5012 static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 20, "Expecting equal");
5013 if (reg >= V0 && reg <= T2) { // 13 consequtive regs.
5014 return reg - V0;
5015 } else if (reg >= S2 && reg <= S7) { // 6 consequtive regs.
5016 return 13 + (reg - S2);
5017 } else if (reg == S8) { // One more.
5018 return 19;
5019 }
5020 LOG(FATAL) << "Unexpected register " << reg;
5021 UNREACHABLE();
5022}
5023
5024static inline int GetBakerMarkFieldArrayThunkDisplacement(GpuRegister reg, bool short_offset) {
5025 int num = GetBakerMarkThunkNumber(reg) +
5026 (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
5027 return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
5028}
5029
5030static inline int GetBakerMarkGcRootThunkDisplacement(GpuRegister reg) {
5031 return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
5032 BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
5033}
5034
// Loads the GC root at `*(obj + offset)` into `root`, honoring the given
// read barrier option. When `label_low` is non-null it is bound immediately
// before the load instruction (so its address can be patched later); the
// offset must then be the 0x5678 placeholder.
void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                             Location root,
                                                             GpuRegister obj,
                                                             uint32_t offset,
                                                             ReadBarrierOption read_barrier_option,
                                                             Mips64Label* label_low) {
  if (label_low != nullptr) {
    DCHECK_EQ(offset, 0x5678u);
  }
  GpuRegister root_reg = root.AsRegister<GpuRegister>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barriers are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        //     temp = Thread::Current()->pReadBarrierMarkReg00
        //     // AKA &art_quick_read_barrier_mark_introspection.
        //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        //     if (temp != nullptr) {
        //       temp = &gc_root_thunk<root_reg>
        //       root = temp(root)
        //     }

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lwu.
        bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
        GpuRegister base = short_offset ? obj : TMP;
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
        if (!short_offset) {
          DCHECK(!label_low);
          // Fold the upper offset bits into the adjusted base register.
          __ Daui(base, obj, offset_high);
        }
        Mips64Label skip_call;
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        if (label_low != nullptr) {
          DCHECK(short_offset);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, base, offset_low);  // Single instruction
                                                                           // in delay slot.
        __ Jialc(T9, thunk_disp);
        __ Bind(&skip_call);
      } else {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark entry point corresponding
        // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
        // is false, and vice versa.
        //
        //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        //     temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
        //     if (temp != null) {
        //       root = temp(root)
        //     }

        if (label_low != nullptr) {
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
        static_assert(
            sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
            "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
            "have different sizes.");
        static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                      "art::mirror::CompressedReference<mirror::Object> and int32_t "
                      "have different sizes.");

        // Slow path marking the GC root `root`.
        Location temp = Location::RegisterLocation(T9);
        SlowPathCodeMIPS64* slow_path =
            new (codegen_->GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(
                instruction,
                root,
                /*entrypoint*/ temp);
        codegen_->AddSlowPath(slow_path);

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(root.reg() - 1);
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadDoubleword, temp.AsRegister<GpuRegister>(), TR, entry_point_offset);
        __ Bnezc(temp.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
        __ Bind(slow_path->GetExitLabel());
      }
    } else {
      if (label_low != nullptr) {
        __ Bind(label_low);
      }
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Daddiu64(root_reg, obj, static_cast<int32_t>(offset));
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    if (label_low != nullptr) {
      __ Bind(label_low);
    }
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
  }
}
5159
// Fast-path Baker read barrier for a field load: loads the reference at
// `*(obj + offset)` into `ref`, marking it via a thunk (when thunks are
// enabled) or via GenerateReferenceLoadWithBakerReadBarrier otherwise.
void CodeGeneratorMIPS64::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t offset,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (kBakerReadBarrierThunksEnableForFields) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //       temp = &field_array_thunk<holder_reg>
    //       temp()
    //     }
    //   not_gray_return_address:
    //     // If the offset is too large to fit into the lw instruction, we
    //     // use an adjusted base register (TMP) here. This register
    //     // receives bits 16 ... 31 of the offset before the thunk invocation
    //     // and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = *(obj+offset);  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // There may have or may have not been a null check if the field offset is smaller than
    // the page size.
    // There must've been a null check in case it's actually a load from an array.
    // We will, however, perform an explicit null check in the thunk as it's easier to
    // do it than not.
    if (instruction->IsArrayGet()) {
      DCHECK(!needs_null_check);
    }
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    Mips64Label skip_call;
    if (short_offset) {
      // Compact branch: no delay slot, so insert a NOP in the forbidden slot.
      __ Beqzc(T9, &skip_call, /* is_bare */ true);
      __ Nop();  // In forbidden slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);  // Single instruction.
    } else {
      int16_t offset_low = Low16Bits(offset);
      int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign extension in lwu.
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Daui(TMP, obj, offset_high);  // In delay slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
      // /* HeapReference<Object> */ ref = *(obj + offset)
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset_low);  // Single instruction.
    }
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // Non-thunk path: delegate to the generic Baker reference load.
  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}
5249
// Fast-path Baker read barrier for an array element load: loads the
// reference at `data[index]` into `ref`, marking it via a thunk (when
// thunks are enabled) or via GenerateReferenceLoadWithBakerReadBarrier.
void CodeGeneratorMIPS64::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                Location ref,
                                                                GpuRegister obj,
                                                                uint32_t data_offset,
                                                                Location index,
                                                                Location temp,
                                                                bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  // References are 4 bytes wide, hence the element scale.
  ScaleFactor scale_factor = TIMES_4;

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //       temp = &field_array_thunk<holder_reg>
    //       temp()
    //     }
    //   not_gray_return_address:
    //     // The element address is pre-calculated in the TMP register before the
    //     // thunk invocation and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMips64PointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
    Mips64Label skip_call;
    __ Beqz(T9, &skip_call, /* is_bare */ true);
    GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
    GpuRegister index_reg = index.AsRegister<GpuRegister>();
    // Pre-compute the element address in TMP for the thunk.
    __ Dlsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
    __ Jialc(T9, thunk_disp);
    __ Bind(&skip_call);
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    return;
  }

  // Non-thunk path: delegate to the generic Baker reference load.
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
5325
// Fast-path Baker read barrier: loads the heap reference at
// `obj + offset [+ index << scale_factor]` into `ref`, and branches to a
// marking slow path only when the object's lock word says it is gray.
// When `always_update_field` is set, the slow path also writes the
// (possibly moved) reference back to the field (used by CAS intrinsics).
void CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                    Location ref,
                                                                    GpuRegister obj,
                                                                    uint32_t offset,
                                                                    Location index,
                                                                    ScaleFactor scale_factor,
                                                                    Location temp,
                                                                    bool needs_null_check,
                                                                    bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  GpuRegister ref_reg = ref.AsRegister<GpuRegister>();
  GpuRegister temp_reg = temp.AsRegister<GpuRegister>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  // Note: when requested, this monitor load also serves as the implicit
  // null check for `obj` (it is the first access through `obj`).
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      // Constant index: fold the scaled index into the displacement.
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, computed_offset);
    } else {
      GpuRegister index_reg = index.AsRegister<GpuRegister>();
      if (scale_factor == TIMES_1) {
        // Dlsa cannot encode a shift amount of zero, so use a plain add.
        __ Daddu(TMP, index_reg, obj);
      } else {
        __ Dlsa(TMP, index_reg, obj, scale_factor);
      }
      __ LoadFromOffset(kLoadUnsignedWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadUnsignedWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS64* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS64 only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register. Thus `offset` and `scale_factor`
    // above are expected to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetScopedAllocator())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS64(instruction,
                                                    ref,
                                                    obj,
                                                    /* field_offset */ index,
                                                    temp_reg);
  } else {
    slow_path = new (GetScopedAllocator()) ReadBarrierMarkSlowPathMIPS64(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltzc(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5431
// Emits an unconditional slow-path (non-Baker) read barrier for a heap
// reference that has already been loaded into `ref` from
// `obj + offset [+ index]`; the marked result is produced in `out`.
void CodeGeneratorMIPS64::GenerateReadBarrierSlow(HInstruction* instruction,
                                                  Location out,
                                                  Location ref,
                                                  Location obj,
                                                  uint32_t offset,
                                                  Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeMIPS64* slow_path = new (GetScopedAllocator())
      ReadBarrierForHeapReferenceSlowPathMIPS64(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  // This barrier is unconditional: always branch into the slow path and
  // fall back in at its exit label.
  __ Bc(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
5458
5459void CodeGeneratorMIPS64::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
5460 Location out,
5461 Location ref,
5462 Location obj,
5463 uint32_t offset,
5464 Location index) {
5465 if (kEmitCompilerReadBarrier) {
5466 // Baker's read barriers shall be handled by the fast path
5467 // (CodeGeneratorMIPS64::GenerateReferenceLoadWithBakerReadBarrier).
5468 DCHECK(!kUseBakerReadBarrier);
5469 // If heap poisoning is enabled, unpoisoning will be taken care of
5470 // by the runtime within the slow path.
5471 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
5472 } else if (kPoisonHeapReferences) {
5473 __ UnpoisonHeapReference(out.AsRegister<GpuRegister>());
5474 }
5475}
5476
5477void CodeGeneratorMIPS64::GenerateReadBarrierForRootSlow(HInstruction* instruction,
5478 Location out,
5479 Location root) {
5480 DCHECK(kEmitCompilerReadBarrier);
5481
5482 // Insert a slow path based read barrier *after* the GC root load.
5483 //
5484 // Note that GC roots are not affected by heap poisoning, so we do
5485 // not need to do anything special for this here.
5486 SlowPathCodeMIPS64* slow_path =
Vladimir Marko174b2e22017-10-12 13:34:49 +01005487 new (GetScopedAllocator()) ReadBarrierForRootSlowPathMIPS64(instruction, out, root);
Alexey Frunze15958152017-02-09 19:08:30 -08005488 AddSlowPath(slow_path);
5489
5490 __ Bc(slow_path->GetEntryLabel());
5491 __ Bind(slow_path->GetExitLabel());
5492}
5493
Alexey Frunze4dda3372015-06-01 18:31:49 -07005494void LocationsBuilderMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005495 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
5496 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07005497 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005498 switch (type_check_kind) {
5499 case TypeCheckKind::kExactCheck:
5500 case TypeCheckKind::kAbstractClassCheck:
5501 case TypeCheckKind::kClassHierarchyCheck:
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005502 case TypeCheckKind::kArrayObjectCheck: {
5503 bool needs_read_barrier = CodeGenerator::InstanceOfNeedsReadBarrier(instruction);
5504 call_kind = needs_read_barrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
5505 baker_read_barrier_slow_path = kUseBakerReadBarrier && needs_read_barrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005506 break;
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005507 }
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005508 case TypeCheckKind::kArrayCheck:
5509 case TypeCheckKind::kUnresolvedCheck:
5510 case TypeCheckKind::kInterfaceCheck:
5511 call_kind = LocationSummary::kCallOnSlowPath;
5512 break;
5513 }
5514
Vladimir Markoca6fff82017-10-03 14:49:14 +01005515 LocationSummary* locations =
5516 new (GetGraph()->GetAllocator()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07005517 if (baker_read_barrier_slow_path) {
5518 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
5519 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005520 locations->SetInAt(0, Location::RequiresRegister());
Nicolas Geoffraybff7a522018-01-25 13:33:07 +00005521 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005522 // The output does overlap inputs.
Serban Constantinescu5a6cc492015-08-13 15:20:25 +01005523 // Note that TypeCheckSlowPathMIPS64 uses this register too.
Alexey Frunze4dda3372015-06-01 18:31:49 -07005524 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08005525 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Alexey Frunze4dda3372015-06-01 18:31:49 -07005526}
5527
5528void InstructionCodeGeneratorMIPS64::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005529 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunze4dda3372015-06-01 18:31:49 -07005530 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08005531 Location obj_loc = locations->InAt(0);
5532 GpuRegister obj = obj_loc.AsRegister<GpuRegister>();
Nicolas Geoffraybff7a522018-01-25 13:33:07 +00005533 GpuRegister cls = locations->InAt(1).AsRegister<GpuRegister>();
Alexey Frunze15958152017-02-09 19:08:30 -08005534 Location out_loc = locations->Out();
5535 GpuRegister out = out_loc.AsRegister<GpuRegister>();
5536 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
5537 DCHECK_LE(num_temps, 1u);
5538 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005539 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
5540 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
5541 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
5542 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Alexey Frunzea0e87b02015-09-24 22:57:20 -07005543 Mips64Label done;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005544 SlowPathCodeMIPS64* slow_path = nullptr;
Alexey Frunze4dda3372015-06-01 18:31:49 -07005545
5546 // Return 0 if `obj` is null.
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005547 // Avoid this check if we know `obj` is not null.
5548 if (instruction->MustDoNullCheck()) {
5549 __ Move(out, ZERO);
5550 __ Beqzc(obj, &done);
5551 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005552
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005553 switch (type_check_kind) {
5554 case TypeCheckKind::kExactCheck: {
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005555 ReadBarrierOption read_barrier_option =
5556 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005557 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005558 GenerateReferenceLoadTwoRegisters(instruction,
5559 out_loc,
5560 obj_loc,
5561 class_offset,
5562 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005563 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005564 // Classes must be equal for the instanceof to succeed.
Nicolas Geoffraybff7a522018-01-25 13:33:07 +00005565 __ Xor(out, out, cls);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005566 __ Sltiu(out, out, 1);
5567 break;
5568 }
5569
5570 case TypeCheckKind::kAbstractClassCheck: {
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005571 ReadBarrierOption read_barrier_option =
5572 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005573 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005574 GenerateReferenceLoadTwoRegisters(instruction,
5575 out_loc,
5576 obj_loc,
5577 class_offset,
5578 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005579 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005580 // If the class is abstract, we eagerly fetch the super class of the
5581 // object to avoid doing a comparison we know will fail.
5582 Mips64Label loop;
5583 __ Bind(&loop);
5584 // /* HeapReference<Class> */ out = out->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08005585 GenerateReferenceLoadOneRegister(instruction,
5586 out_loc,
5587 super_offset,
5588 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005589 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005590 // If `out` is null, we use it for the result, and jump to `done`.
5591 __ Beqzc(out, &done);
Nicolas Geoffraybff7a522018-01-25 13:33:07 +00005592 __ Bnec(out, cls, &loop);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005593 __ LoadConst32(out, 1);
5594 break;
5595 }
5596
5597 case TypeCheckKind::kClassHierarchyCheck: {
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005598 ReadBarrierOption read_barrier_option =
5599 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005600 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005601 GenerateReferenceLoadTwoRegisters(instruction,
5602 out_loc,
5603 obj_loc,
5604 class_offset,
5605 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005606 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005607 // Walk over the class hierarchy to find a match.
5608 Mips64Label loop, success;
5609 __ Bind(&loop);
Nicolas Geoffraybff7a522018-01-25 13:33:07 +00005610 __ Beqc(out, cls, &success);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005611 // /* HeapReference<Class> */ out = out->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08005612 GenerateReferenceLoadOneRegister(instruction,
5613 out_loc,
5614 super_offset,
5615 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005616 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005617 __ Bnezc(out, &loop);
5618 // If `out` is null, we use it for the result, and jump to `done`.
5619 __ Bc(&done);
5620 __ Bind(&success);
5621 __ LoadConst32(out, 1);
5622 break;
5623 }
5624
5625 case TypeCheckKind::kArrayObjectCheck: {
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005626 ReadBarrierOption read_barrier_option =
5627 CodeGenerator::ReadBarrierOptionForInstanceOf(instruction);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005628 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005629 GenerateReferenceLoadTwoRegisters(instruction,
5630 out_loc,
5631 obj_loc,
5632 class_offset,
5633 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005634 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005635 // Do an exact check.
5636 Mips64Label success;
Nicolas Geoffraybff7a522018-01-25 13:33:07 +00005637 __ Beqc(out, cls, &success);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005638 // Otherwise, we need to check that the object's class is a non-primitive array.
5639 // /* HeapReference<Class> */ out = out->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08005640 GenerateReferenceLoadOneRegister(instruction,
5641 out_loc,
5642 component_offset,
5643 maybe_temp_loc,
Alexey Frunzedfc30af2018-01-24 16:25:10 -08005644 read_barrier_option);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005645 // If `out` is null, we use it for the result, and jump to `done`.
5646 __ Beqzc(out, &done);
5647 __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
5648 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
5649 __ Sltiu(out, out, 1);
5650 __ Bc(&done);
5651 __ Bind(&success);
5652 __ LoadConst32(out, 1);
5653 break;
5654 }
5655
5656 case TypeCheckKind::kArrayCheck: {
5657 // No read barrier since the slow path will retry upon failure.
5658 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08005659 GenerateReferenceLoadTwoRegisters(instruction,
5660 out_loc,
5661 obj_loc,
5662 class_offset,
5663 maybe_temp_loc,
5664 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005665 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01005666 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
5667 instruction, /* is_fatal */ false);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005668 codegen_->AddSlowPath(slow_path);
Nicolas Geoffraybff7a522018-01-25 13:33:07 +00005669 __ Bnec(out, cls, slow_path->GetEntryLabel());
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005670 __ LoadConst32(out, 1);
5671 break;
5672 }
5673
5674 case TypeCheckKind::kUnresolvedCheck:
5675 case TypeCheckKind::kInterfaceCheck: {
5676 // Note that we indeed only call on slow path, but we always go
5677 // into the slow path for the unresolved and interface check
5678 // cases.
5679 //
5680 // We cannot directly call the InstanceofNonTrivial runtime
5681 // entry point without resorting to a type checking slow path
5682 // here (i.e. by calling InvokeRuntime directly), as it would
5683 // require to assign fixed registers for the inputs of this
5684 // HInstanceOf instruction (following the runtime calling
5685 // convention), which might be cluttered by the potential first
5686 // read barrier emission at the beginning of this method.
5687 //
5688 // TODO: Introduce a new runtime entry point taking the object
5689 // to test (instead of its class) as argument, and let it deal
5690 // with the read barrier issues. This will let us refactor this
5691 // case of the `switch` code as it was previously (with a direct
5692 // call to the runtime not using a type checking slow path).
5693 // This should also be beneficial for the other cases above.
5694 DCHECK(locations->OnlyCallsOnSlowPath());
Vladimir Marko174b2e22017-10-12 13:34:49 +01005695 slow_path = new (codegen_->GetScopedAllocator()) TypeCheckSlowPathMIPS64(
5696 instruction, /* is_fatal */ false);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005697 codegen_->AddSlowPath(slow_path);
5698 __ Bc(slow_path->GetEntryLabel());
5699 break;
5700 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005701 }
5702
5703 __ Bind(&done);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08005704
5705 if (slow_path != nullptr) {
5706 __ Bind(slow_path->GetExitLabel());
5707 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07005708}
5709
5710void LocationsBuilderMIPS64::VisitIntConstant(HIntConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005711 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005712 locations->SetOut(Location::ConstantLocation(constant));
5713}
5714
// Intentionally empty: no code is emitted for the constant itself.
// Will be generated at use site.
void InstructionCodeGeneratorMIPS64::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
5718
5719void LocationsBuilderMIPS64::VisitNullConstant(HNullConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01005720 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005721 locations->SetOut(Location::ConstantLocation(constant));
5722}
5723
// Intentionally empty: no code is emitted for the constant itself.
// Will be generated at use site.
void InstructionCodeGeneratorMIPS64::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
5727
// Sets up locations for an invoke whose target could not be resolved at
// compile time; the call is routed through a runtime trampoline.
void LocationsBuilderMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
5734
// Emits the runtime-trampoline call for an unresolved invoke; all the
// work is delegated to the shared code generator helper.
void InstructionCodeGeneratorMIPS64::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
5738
Alexey Frunze4dda3372015-06-01 18:31:49 -07005739void LocationsBuilderMIPS64::HandleInvoke(HInvoke* invoke) {
5740 InvokeDexCallingConventionVisitorMIPS64 calling_convention_visitor;
5741 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
5742}
5743
// Interface invokes use the common invoke layout plus one extra temp for
// the hidden argument expected by the IMT conflict trampoline.
void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // The register T0 is required to be used for the hidden argument in
  // art_quick_imt_conflict_trampoline, so add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(T0));
}
5750
// Emits an interface call: loads the receiver's class, indexes into its
// IMT, and jumps to the resolved ArtMethod's quick entry point. The
// hidden argument (dex method index) is passed for IMT conflict
// resolution.
void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);

  // Set the hidden argument.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<GpuRegister>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    // Receiver spilled to the stack: reload it first, then its class.
    __ LoadFromOffset(kLoadUnsignedWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadUnsignedWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister<GpuRegister>(), class_offset);
  }
  // The class load above doubles as the implicit null check of the receiver.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->imt_ (the class's interface method table).
  __ LoadFromOffset(kLoadDoubleword, temp, temp,
      mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMips64PointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
  // T9();  (Jalr uses a delay slot, filled with a Nop.)
  __ Jalr(T9);
  __ Nop();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
5792
5793void LocationsBuilderMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen3039e382015-08-26 07:54:08 -07005794 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5795 if (intrinsic.TryDispatch(invoke)) {
5796 return;
5797 }
5798
Alexey Frunze4dda3372015-06-01 18:31:49 -07005799 HandleInvoke(invoke);
5800}
5801
5802void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005803 // Explicit clinit checks triggered by static invokes must have been pruned by
5804 // art::PrepareForRegisterAllocation.
5805 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005806
Chris Larsen3039e382015-08-26 07:54:08 -07005807 IntrinsicLocationsBuilderMIPS64 intrinsic(codegen_);
5808 if (intrinsic.TryDispatch(invoke)) {
5809 return;
5810 }
5811
Alexey Frunze4dda3372015-06-01 18:31:49 -07005812 HandleInvoke(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005813}
5814
// Polymorphic (MethodHandle) invokes use the common invoke layout.
void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}
5818
// Emits the polymorphic call via the shared code generator helper.
void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
5822
Chris Larsen3039e382015-08-26 07:54:08 -07005823static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07005824 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen3039e382015-08-26 07:54:08 -07005825 IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
5826 intrinsic.Dispatch(invoke);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005827 return true;
5828 }
5829 return false;
5830}
5831
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005832HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
Alexey Frunzef63f5692016-12-13 17:43:11 -08005833 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005834 bool fallback_load = false;
5835 switch (desired_string_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005836 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01005837 case HLoadString::LoadKind::kBootImageInternTable:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005838 case HLoadString::LoadKind::kBssEntry:
5839 DCHECK(!Runtime::Current()->UseJitCompilation());
5840 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005841 case HLoadString::LoadKind::kJitTableAddress:
5842 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005843 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005844 case HLoadString::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005845 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko764d4542017-05-16 10:31:41 +01005846 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005847 }
5848 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005849 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005850 }
5851 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00005852}
5853
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005854HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
5855 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08005856 bool fallback_load = false;
5857 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00005858 case HLoadClass::LoadKind::kInvalid:
5859 LOG(FATAL) << "UNREACHABLE";
5860 UNREACHABLE();
Alexey Frunzef63f5692016-12-13 17:43:11 -08005861 case HLoadClass::LoadKind::kReferrersClass:
5862 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005863 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko94ec2db2017-09-06 17:21:03 +01005864 case HLoadClass::LoadKind::kBootImageClassTable:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00005865 case HLoadClass::LoadKind::kBssEntry:
5866 DCHECK(!Runtime::Current()->UseJitCompilation());
5867 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005868 case HLoadClass::LoadKind::kJitTableAddress:
5869 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunzef63f5692016-12-13 17:43:11 -08005870 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01005871 case HLoadClass::LoadKind::kBootImageAddress:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005872 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunzef63f5692016-12-13 17:43:11 -08005873 break;
5874 }
5875 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01005876 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunzef63f5692016-12-13 17:43:11 -08005877 }
5878 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01005879}
5880
// Returns the dispatch info actually supported for a static/direct call.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
  // On MIPS64 we support all dispatch types.
  return desired_dispatch_info;
}
5887
// Emits a static or direct call: first materializes the callee ArtMethod
// (or its address) according to the method-load kind, then transfers
// control according to the code-pointer location, and records the PC for
// stack maps.
void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMips64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadDoubleword,
                        temp.AsRegister<GpuRegister>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: the current method is already available as an input.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      // Two patches (high/low halves) resolve the boot-image method address
      // at link time; 0x5678 is a placeholder for the low 16 bits.
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod());
      CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Daddiu(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // The method address is known at compile time; load it from a literal.
      __ LoadLiteral(temp.AsRegister<GpuRegister>(),
                     kLoadDoubleword,
                     DeduplicateUint64Literal(invoke->GetMethodAddress()));
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from the .bss entry, patched at link time.
      PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
      PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
      EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
      __ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-recursive call: branch-and-link straight to our own frame entry.
      __ Balc(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadDoubleword,
                        T9,
                        callee_method.AsRegister<GpuRegister>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMips64PointerSize).Int32Value());
      // T9()  (Jalr uses a delay slot, filled with a Nop.)
      __ Jalr(T9);
      __ Nop();
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
5959
5960void InstructionCodeGeneratorMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00005961 // Explicit clinit checks triggered by static invokes must have been pruned by
5962 // art::PrepareForRegisterAllocation.
5963 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005964
5965 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
5966 return;
5967 }
5968
5969 LocationSummary* locations = invoke->GetLocations();
5970 codegen_->GenerateStaticOrDirectCall(invoke,
5971 locations->HasTemps()
5972 ? locations->GetTemp(0)
5973 : Location::NoLocation());
Alexey Frunze4dda3372015-06-01 18:31:49 -07005974}
5975
Vladimir Markoe7197bf2017-06-02 17:00:23 +01005976void CodeGeneratorMIPS64::GenerateVirtualCall(
5977 HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00005978 // Use the calling convention instead of the location of the receiver, as
5979 // intrinsics may have put the receiver in a different register. In the intrinsics
5980 // slow path, the arguments have been moved to the right place, so here we are
5981 // guaranteed that the receiver is the first register of the calling convention.
5982 InvokeDexCallingConvention calling_convention;
5983 GpuRegister receiver = calling_convention.GetRegisterAt(0);
5984
Alexey Frunze53afca12015-11-05 16:34:23 -08005985 GpuRegister temp = temp_location.AsRegister<GpuRegister>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07005986 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
5987 invoke->GetVTableIndex(), kMips64PointerSize).SizeValue();
5988 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Andreas Gampe542451c2016-07-26 09:02:02 -07005989 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
Alexey Frunze4dda3372015-06-01 18:31:49 -07005990
5991 // temp = object->GetClass();
Nicolas Geoffraye5234232015-12-02 09:06:11 +00005992 __ LoadFromOffset(kLoadUnsignedWord, temp, receiver, class_offset);
Alexey Frunze53afca12015-11-05 16:34:23 -08005993 MaybeRecordImplicitNullCheck(invoke);
Alexey Frunzec061de12017-02-14 13:27:23 -08005994 // Instead of simply (possibly) unpoisoning `temp` here, we should
5995 // emit a read barrier for the previous class reference load.
5996 // However this is not required in practice, as this is an
5997 // intermediate/temporary reference and because the current
5998 // concurrent copying collector keeps the from-space memory
5999 // intact/accessible until the end of the marking phase (the
6000 // concurrent copying collector may not in the future).
6001 __ MaybeUnpoisonHeapReference(temp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006002 // temp = temp->GetMethodAt(method_offset);
6003 __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset);
6004 // T9 = temp->GetEntryPoint();
6005 __ LoadFromOffset(kLoadDoubleword, T9, temp, entry_point.Int32Value());
6006 // T9();
6007 __ Jalr(T9);
Alexey Frunzea0e87b02015-09-24 22:57:20 -07006008 __ Nop();
Vladimir Markoe7197bf2017-06-02 17:00:23 +01006009 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Alexey Frunze53afca12015-11-05 16:34:23 -08006010}
6011
6012void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
6013 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
6014 return;
6015 }
6016
6017 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006018 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006019}
6020
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Runtime-call path: `loc` (the first runtime argument register) serves as
    // both the input and the output of the runtime call.
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  // Classes already in the boot image are never moved, so no read barrier is needed.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
    // The current method is the only input for the referrer's-class load.
    locations->SetInAt(0, Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
6055
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006056// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6057// move.
6058void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00006059 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006060 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00006061 codegen_->GenerateLoadClassRuntimeCall(cls);
Calin Juravle580b6092015-10-06 17:35:58 +01006062 return;
6063 }
Vladimir Marko41559982017-01-06 14:04:23 +00006064 DCHECK(!cls->NeedsAccessCheck());
Calin Juravle580b6092015-10-06 17:35:58 +01006065
Vladimir Marko41559982017-01-06 14:04:23 +00006066 LocationSummary* locations = cls->GetLocations();
Alexey Frunzef63f5692016-12-13 17:43:11 -08006067 Location out_loc = locations->Out();
6068 GpuRegister out = out_loc.AsRegister<GpuRegister>();
6069 GpuRegister current_method_reg = ZERO;
6070 if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006071 load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006072 current_method_reg = locations->InAt(0).AsRegister<GpuRegister>();
6073 }
6074
Alexey Frunze15958152017-02-09 19:08:30 -08006075 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
6076 ? kWithoutReadBarrier
6077 : kCompilerReadBarrierOption;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006078 bool generate_null_check = false;
6079 switch (load_kind) {
6080 case HLoadClass::LoadKind::kReferrersClass:
6081 DCHECK(!cls->CanCallRuntime());
6082 DCHECK(!cls->MustGenerateClinitCheck());
6083 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
6084 GenerateGcRootFieldLoad(cls,
6085 out_loc,
6086 current_method_reg,
Alexey Frunze15958152017-02-09 19:08:30 -08006087 ArtMethod::DeclaringClassOffset().Int32Value(),
6088 read_barrier_option);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006089 break;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006090 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006091 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze15958152017-02-09 19:08:30 -08006092 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006093 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Alexey Frunzef63f5692016-12-13 17:43:11 -08006094 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006095 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6096 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
6097 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006098 __ Daddiu(out, AT, /* placeholder */ 0x5678);
6099 break;
6100 }
6101 case HLoadClass::LoadKind::kBootImageAddress: {
Alexey Frunze15958152017-02-09 19:08:30 -08006102 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00006103 uint32_t address = dchecked_integral_cast<uint32_t>(
6104 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
6105 DCHECK_NE(address, 0u);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006106 __ LoadLiteral(out,
6107 kLoadUnsignedWord,
6108 codegen_->DeduplicateBootImageAddressLiteral(address));
6109 break;
6110 }
Vladimir Marko94ec2db2017-09-06 17:21:03 +01006111 case HLoadClass::LoadKind::kBootImageClassTable: {
6112 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6113 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
6114 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
6115 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6116 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
6117 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
6118 __ Lwu(out, AT, /* placeholder */ 0x5678);
6119 // Extract the reference from the slot data, i.e. clear the hash bits.
6120 int32_t masked_hash = ClassTable::TableSlot::MaskHash(
6121 ComputeModifiedUtf8Hash(cls->GetDexFile().StringByTypeIdx(cls->GetTypeIndex())));
6122 if (masked_hash != 0) {
6123 __ Daddiu(out, out, -masked_hash);
6124 }
6125 break;
6126 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006127 case HLoadClass::LoadKind::kBssEntry: {
Vladimir Markof3c52b42017-11-17 17:32:12 +00006128 CodeGeneratorMIPS64::PcRelativePatchInfo* bss_info_high =
6129 codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006130 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6131 codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
Vladimir Markof3c52b42017-11-17 17:32:12 +00006132 codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high, out);
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006133 GenerateGcRootFieldLoad(cls,
6134 out_loc,
Vladimir Markof3c52b42017-11-17 17:32:12 +00006135 out,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006136 /* placeholder */ 0x5678,
6137 read_barrier_option,
6138 &info_low->label);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006139 generate_null_check = true;
6140 break;
6141 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08006142 case HLoadClass::LoadKind::kJitTableAddress:
6143 __ LoadLiteral(out,
6144 kLoadUnsignedWord,
6145 codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
6146 cls->GetTypeIndex(),
6147 cls->GetClass()));
Alexey Frunze15958152017-02-09 19:08:30 -08006148 GenerateGcRootFieldLoad(cls, out_loc, out, 0, read_barrier_option);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006149 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006150 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00006151 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00006152 LOG(FATAL) << "UNREACHABLE";
6153 UNREACHABLE();
Alexey Frunzef63f5692016-12-13 17:43:11 -08006154 }
6155
6156 if (generate_null_check || cls->MustGenerateClinitCheck()) {
6157 DCHECK(cls->CanCallRuntime());
Vladimir Marko174b2e22017-10-12 13:34:49 +01006158 SlowPathCodeMIPS64* slow_path = new (codegen_->GetScopedAllocator()) LoadClassSlowPathMIPS64(
Vladimir Markof3c52b42017-11-17 17:32:12 +00006159 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
Alexey Frunzef63f5692016-12-13 17:43:11 -08006160 codegen_->AddSlowPath(slow_path);
6161 if (generate_null_check) {
6162 __ Beqzc(out, slow_path->GetEntryLabel());
6163 }
6164 if (cls->MustGenerateClinitCheck()) {
6165 GenerateClassInitializationCheck(slow_path, out);
6166 } else {
6167 __ Bind(slow_path->GetExitLabel());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006168 }
6169 }
6170}
6171
David Brazdilcb1c0552015-08-04 16:22:25 +01006172static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07006173 return Thread::ExceptionOffset<kMips64PointerSize>().Int32Value();
David Brazdilcb1c0552015-08-04 16:22:25 +01006174}
6175
Alexey Frunze4dda3372015-06-01 18:31:49 -07006176void LocationsBuilderMIPS64::VisitLoadException(HLoadException* load) {
6177 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006178 new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006179 locations->SetOut(Location::RequiresRegister());
6180}
6181
6182void InstructionCodeGeneratorMIPS64::VisitLoadException(HLoadException* load) {
6183 GpuRegister out = load->GetLocations()->Out().AsRegister<GpuRegister>();
David Brazdilcb1c0552015-08-04 16:22:25 +01006184 __ LoadFromOffset(kLoadUnsignedWord, out, TR, GetExceptionTlsOffset());
6185}
6186
void LocationsBuilderMIPS64::VisitClearException(HClearException* clear) {
  // No inputs, outputs or temps are needed; the summary only marks the
  // instruction as requiring no call.
  new (GetGraph()->GetAllocator()) LocationSummary(clear, LocationSummary::kNoCall);
}
6190
void InstructionCodeGeneratorMIPS64::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Clear the pending exception by storing zero (null) into the thread-local
  // exception slot; a 32-bit store suffices since references are 32 bits.
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
6194
Alexey Frunze4dda3372015-06-01 18:31:49 -07006195void LocationsBuilderMIPS64::VisitLoadString(HLoadString* load) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006196 HLoadString::LoadKind load_kind = load->GetLoadKind();
6197 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Vladimir Markoca6fff82017-10-03 14:49:14 +01006198 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(load, call_kind);
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006199 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006200 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006201 locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Alexey Frunzef63f5692016-12-13 17:43:11 -08006202 } else {
6203 locations->SetOut(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07006204 if (load_kind == HLoadString::LoadKind::kBssEntry) {
6205 if (!kUseReadBarrier || kUseBakerReadBarrier) {
6206 // Rely on the pResolveString and marking to save everything we need.
6207 RegisterSet caller_saves = RegisterSet::Empty();
6208 InvokeRuntimeCallingConvention calling_convention;
6209 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6210 locations->SetCustomSlowPathCallerSaves(caller_saves);
6211 } else {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006212 // For non-Baker read barriers we have a temp-clobbering call.
Alexey Frunzec61c0762017-04-10 13:54:23 -07006213 }
6214 }
Alexey Frunzef63f5692016-12-13 17:43:11 -08006215 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006216}
6217
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006218// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
6219// move.
6220void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006221 HLoadString::LoadKind load_kind = load->GetLoadKind();
6222 LocationSummary* locations = load->GetLocations();
6223 Location out_loc = locations->Out();
6224 GpuRegister out = out_loc.AsRegister<GpuRegister>();
6225
6226 switch (load_kind) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006227 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
6228 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006229 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006230 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006231 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6232 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
6233 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006234 __ Daddiu(out, AT, /* placeholder */ 0x5678);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006235 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006236 }
6237 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00006238 uint32_t address = dchecked_integral_cast<uint32_t>(
6239 reinterpret_cast<uintptr_t>(load->GetString().Get()));
6240 DCHECK_NE(address, 0u);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006241 __ LoadLiteral(out,
6242 kLoadUnsignedWord,
6243 codegen_->DeduplicateBootImageAddressLiteral(address));
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006244 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006245 }
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006246 case HLoadString::LoadKind::kBootImageInternTable: {
Alexey Frunzef63f5692016-12-13 17:43:11 -08006247 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006248 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00006249 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006250 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6251 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01006252 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
6253 __ Lwu(out, AT, /* placeholder */ 0x5678);
6254 return;
6255 }
6256 case HLoadString::LoadKind::kBssEntry: {
6257 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
6258 CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
6259 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
6260 CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
6261 codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Vladimir Markof3c52b42017-11-17 17:32:12 +00006262 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, out);
Alexey Frunze15958152017-02-09 19:08:30 -08006263 GenerateGcRootFieldLoad(load,
6264 out_loc,
Vladimir Markof3c52b42017-11-17 17:32:12 +00006265 out,
Alexey Frunze15958152017-02-09 19:08:30 -08006266 /* placeholder */ 0x5678,
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006267 kCompilerReadBarrierOption,
6268 &info_low->label);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07006269 SlowPathCodeMIPS64* slow_path =
Vladimir Markof3c52b42017-11-17 17:32:12 +00006270 new (codegen_->GetScopedAllocator()) LoadStringSlowPathMIPS64(load);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006271 codegen_->AddSlowPath(slow_path);
6272 __ Beqzc(out, slow_path->GetEntryLabel());
6273 __ Bind(slow_path->GetExitLabel());
6274 return;
6275 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08006276 case HLoadString::LoadKind::kJitTableAddress:
6277 __ LoadLiteral(out,
6278 kLoadUnsignedWord,
6279 codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
6280 load->GetStringIndex(),
6281 load->GetString()));
Alexey Frunze15958152017-02-09 19:08:30 -08006282 GenerateGcRootFieldLoad(load, out_loc, out, 0, kCompilerReadBarrierOption);
Alexey Frunze627c1a02017-01-30 19:28:14 -08006283 return;
Alexey Frunzef63f5692016-12-13 17:43:11 -08006284 default:
6285 break;
6286 }
6287
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07006288 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01006289 DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006290 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006291 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Alexey Frunzef63f5692016-12-13 17:43:11 -08006292 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
6293 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
6294 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006295}
6296
Alexey Frunze4dda3372015-06-01 18:31:49 -07006297void LocationsBuilderMIPS64::VisitLongConstant(HLongConstant* constant) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006298 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(constant);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006299 locations->SetOut(Location::ConstantLocation(constant));
6300}
6301
void InstructionCodeGeneratorMIPS64::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site. The builder recorded a constant location,
  // so each user materializes the value itself; nothing is emitted here.
}
6305
6306void LocationsBuilderMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006307 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6308 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006309 InvokeRuntimeCallingConvention calling_convention;
6310 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6311}
6312
6313void InstructionCodeGeneratorMIPS64::VisitMonitorOperation(HMonitorOperation* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006314 codegen_->InvokeRuntime(instruction->IsEnter() ? kQuickLockObject : kQuickUnlockObject,
Alexey Frunze4dda3372015-06-01 18:31:49 -07006315 instruction,
Serban Constantinescufc734082016-07-19 17:18:07 +01006316 instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00006317 if (instruction->IsEnter()) {
6318 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
6319 } else {
6320 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
6321 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006322}
6323
6324void LocationsBuilderMIPS64::VisitMul(HMul* mul) {
6325 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006326 new (GetGraph()->GetAllocator()) LocationSummary(mul, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006327 switch (mul->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006328 case DataType::Type::kInt32:
6329 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006330 locations->SetInAt(0, Location::RequiresRegister());
6331 locations->SetInAt(1, Location::RequiresRegister());
6332 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6333 break;
6334
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006335 case DataType::Type::kFloat32:
6336 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006337 locations->SetInAt(0, Location::RequiresFpuRegister());
6338 locations->SetInAt(1, Location::RequiresFpuRegister());
6339 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6340 break;
6341
6342 default:
6343 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
6344 }
6345}
6346
6347void InstructionCodeGeneratorMIPS64::VisitMul(HMul* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006348 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006349 LocationSummary* locations = instruction->GetLocations();
6350
6351 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006352 case DataType::Type::kInt32:
6353 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006354 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6355 GpuRegister lhs = locations->InAt(0).AsRegister<GpuRegister>();
6356 GpuRegister rhs = locations->InAt(1).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006357 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006358 __ MulR6(dst, lhs, rhs);
6359 else
6360 __ Dmul(dst, lhs, rhs);
6361 break;
6362 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006363 case DataType::Type::kFloat32:
6364 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006365 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6366 FpuRegister lhs = locations->InAt(0).AsFpuRegister<FpuRegister>();
6367 FpuRegister rhs = locations->InAt(1).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006368 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006369 __ MulS(dst, lhs, rhs);
6370 else
6371 __ MulD(dst, lhs, rhs);
6372 break;
6373 }
6374 default:
6375 LOG(FATAL) << "Unexpected mul type " << type;
6376 }
6377}
6378
6379void LocationsBuilderMIPS64::VisitNeg(HNeg* neg) {
6380 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01006381 new (GetGraph()->GetAllocator()) LocationSummary(neg, LocationSummary::kNoCall);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006382 switch (neg->GetResultType()) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006383 case DataType::Type::kInt32:
6384 case DataType::Type::kInt64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006385 locations->SetInAt(0, Location::RequiresRegister());
6386 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
6387 break;
6388
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006389 case DataType::Type::kFloat32:
6390 case DataType::Type::kFloat64:
Alexey Frunze4dda3372015-06-01 18:31:49 -07006391 locations->SetInAt(0, Location::RequiresFpuRegister());
6392 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
6393 break;
6394
6395 default:
6396 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
6397 }
6398}
6399
6400void InstructionCodeGeneratorMIPS64::VisitNeg(HNeg* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006401 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006402 LocationSummary* locations = instruction->GetLocations();
6403
6404 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006405 case DataType::Type::kInt32:
6406 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006407 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6408 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006409 if (type == DataType::Type::kInt32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006410 __ Subu(dst, ZERO, src);
6411 else
6412 __ Dsubu(dst, ZERO, src);
6413 break;
6414 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006415 case DataType::Type::kFloat32:
6416 case DataType::Type::kFloat64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006417 FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
6418 FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006419 if (type == DataType::Type::kFloat32)
Alexey Frunze4dda3372015-06-01 18:31:49 -07006420 __ NegS(dst, src);
6421 else
6422 __ NegD(dst, src);
6423 break;
6424 }
6425 default:
6426 LOG(FATAL) << "Unexpected neg type " << type;
6427 }
6428}
6429
6430void LocationsBuilderMIPS64::VisitNewArray(HNewArray* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006431 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6432 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006433 InvokeRuntimeCallingConvention calling_convention;
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006434 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006435 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6436 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006437}
6438
6439void InstructionCodeGeneratorMIPS64::VisitNewArray(HNewArray* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08006440 // Note: if heap poisoning is enabled, the entry point takes care
6441 // of poisoning the reference.
Goran Jakovljevic854df412017-06-27 14:41:39 +02006442 QuickEntrypointEnum entrypoint =
6443 CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
6444 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00006445 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Goran Jakovljevic854df412017-06-27 14:41:39 +02006446 DCHECK(!codegen_->IsLeafMethod());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006447}
6448
6449void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006450 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6451 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006452 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00006453 if (instruction->IsStringAlloc()) {
6454 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
6455 } else {
6456 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00006457 }
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006458 locations->SetOut(calling_convention.GetReturnLocation(DataType::Type::kReference));
Alexey Frunze4dda3372015-06-01 18:31:49 -07006459}
6460
// Emits the runtime call that allocates an object instance.
void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    GpuRegister temp = instruction->GetLocations()->GetTemp(0).AsRegister<GpuRegister>();
    MemberOffset code_offset =
        ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64PointerSize);
    // Load the ArtMethod* for pNewEmptyString from the current thread (TR),
    // then load its quick-compiled code pointer and call it through T9.
    __ LoadFromOffset(kLoadDoubleword, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadDoubleword, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ Nop();  // Delay slot.
    // This path bypasses InvokeRuntime(), so the PC info must be recorded by hand.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
6479
// Bitwise NOT needs one core register in and one out; no call, no overlap.
void LocationsBuilderMIPS64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
6485
6486void InstructionCodeGeneratorMIPS64::VisitNot(HNot* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006487 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006488 LocationSummary* locations = instruction->GetLocations();
6489
6490 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006491 case DataType::Type::kInt32:
6492 case DataType::Type::kInt64: {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006493 GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
6494 GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
6495 __ Nor(dst, src, ZERO);
6496 break;
6497 }
6498
6499 default:
6500 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
6501 }
6502}
6503
// Boolean negation needs one core register in and one out; no call, no overlap.
void LocationsBuilderMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
6509
6510void InstructionCodeGeneratorMIPS64::VisitBooleanNot(HBooleanNot* instruction) {
6511 LocationSummary* locations = instruction->GetLocations();
6512 __ Xori(locations->Out().AsRegister<GpuRegister>(),
6513 locations->InAt(0).AsRegister<GpuRegister>(),
6514 1);
6515}
6516
// Null checks: the builder reserves a register for the checked reference; code
// generation dispatches to either an implicit (faulting load) or an explicit
// (compare-and-branch to slow path) check via GenerateNullCheck().
void LocationsBuilderMIPS64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}

void CodeGeneratorMIPS64::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // Skip the check entirely if a later user instruction will fault on null anyway.
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  // Load from offset 0 of the reference into ZERO (result discarded): a null
  // reference makes this load fault. Record the PC so the fault can be mapped
  // back to this instruction.
  __ Lw(ZERO, obj.AsRegister<GpuRegister>(), 0);
  RecordPcInfo(instruction, instruction->GetDexPc());
}

void CodeGeneratorMIPS64::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeMIPS64* slow_path =
      new (GetScopedAllocator()) NullCheckSlowPathMIPS64(instruction);
  AddSlowPath(slow_path);

  Location obj = instruction->GetLocations()->InAt(0);

  // Branch to the throwing slow path if the reference is zero (null).
  __ Beqzc(obj.AsRegister<GpuRegister>(), slow_path->GetEntryLabel());
}

void InstructionCodeGeneratorMIPS64::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
6545
// Bitwise OR shares the generic binary-op handling for locations and codegen.
void LocationsBuilderMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}
6553
// Parallel moves are materialized during register allocation and never reach
// the locations builder.
void LocationsBuilderMIPS64::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitParallelMove(HParallelMove* instruction) {
  // When this move directly precedes a loop's suspend check, let the codegen
  // drop loop-phi spill slots from the suspend check's stack map first.
  if (instruction->GetNext()->IsSuspendCheck() &&
      instruction->GetBlock()->GetLoopInformation() != nullptr) {
    HSuspendCheck* suspend_check = instruction->GetNext()->AsSuspendCheck();
    // The back edge will generate the suspend check.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(suspend_check, instruction);
  }

  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
6568
6569void LocationsBuilderMIPS64::VisitParameterValue(HParameterValue* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006570 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006571 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
6572 if (location.IsStackSlot()) {
6573 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6574 } else if (location.IsDoubleStackSlot()) {
6575 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
6576 }
6577 locations->SetOut(location);
6578}
6579
void InstructionCodeGeneratorMIPS64::VisitParameterValue(HParameterValue* instruction
                                                         ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}

// The current ArtMethod* is pinned to the dedicated method register.
void LocationsBuilderMIPS64::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}

void InstructionCodeGeneratorMIPS64::VisitCurrentMethod(HCurrentMethod* instruction
                                                        ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}

// Phis accept any location for inputs/output; the register allocator resolves
// them into parallel moves, so no code is ever emitted for a phi itself.
void LocationsBuilderMIPS64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(instruction);
  for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorMIPS64::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
6607
// Remainder: integral types are computed inline; float/double call the fmod
// runtime entrypoints and therefore use the runtime calling convention.
void LocationsBuilderMIPS64::VisitRem(HRem* rem) {
  DataType::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      DataType::IsFloatingPointType(type) ? LocationSummary::kCallOnMainOnly
                                          : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(rem, call_kind);

  switch (type) {
    case DataType::Type::kInt32:
    case DataType::Type::kInt64:
      locations->SetInAt(0, Location::RequiresRegister());
      // The divisor may be encoded as an immediate when it is a constant.
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case DataType::Type::kFloat32:
    case DataType::Type::kFloat64: {
      // Operands and result are pinned to the runtime-call FPU argument/return
      // registers since this lowers to a call.
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
6636
6637void InstructionCodeGeneratorMIPS64::VisitRem(HRem* instruction) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006638 DataType::Type type = instruction->GetType();
Alexey Frunze4dda3372015-06-01 18:31:49 -07006639
6640 switch (type) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006641 case DataType::Type::kInt32:
6642 case DataType::Type::kInt64:
Alexey Frunzec857c742015-09-23 15:12:39 -07006643 GenerateDivRemIntegral(instruction);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006644 break;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006645
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006646 case DataType::Type::kFloat32:
6647 case DataType::Type::kFloat64: {
6648 QuickEntrypointEnum entrypoint =
6649 (type == DataType::Type::kFloat32) ? kQuickFmodf : kQuickFmod;
Serban Constantinescufc734082016-07-19 17:18:07 +01006650 codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006651 if (type == DataType::Type::kFloat32) {
Roland Levillain888d0672015-11-23 18:53:50 +00006652 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
6653 } else {
6654 CheckEntrypointTypes<kQuickFmod, double, double, double>();
6655 }
Alexey Frunze4dda3372015-06-01 18:31:49 -07006656 break;
6657 }
6658 default:
6659 LOG(FATAL) << "Unexpected rem type " << type;
6660 }
6661}
6662
Igor Murashkind01745e2017-04-05 16:40:31 -07006663void LocationsBuilderMIPS64::VisitConstructorFence(HConstructorFence* constructor_fence) {
6664 constructor_fence->SetLocations(nullptr);
6665}
6666
6667void InstructionCodeGeneratorMIPS64::VisitConstructorFence(
6668 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
6669 GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
6670}
6671
Alexey Frunze4dda3372015-06-01 18:31:49 -07006672void LocationsBuilderMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
6673 memory_barrier->SetLocations(nullptr);
6674}
6675
6676void InstructionCodeGeneratorMIPS64::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
6677 GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
6678}
6679
// Return with a value: the operand must be in the ABI return location.
void LocationsBuilderMIPS64::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(ret);
  DataType::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, Mips64ReturnLocation(return_type));
}

void InstructionCodeGeneratorMIPS64::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  // The value is already in the return register; just tear down the frame.
  codegen_->GenerateFrameExit();
}

void LocationsBuilderMIPS64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorMIPS64::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
6697
Alexey Frunze92d90602015-12-18 18:16:36 -08006698void LocationsBuilderMIPS64::VisitRor(HRor* ror) {
6699 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00006700}
6701
Alexey Frunze92d90602015-12-18 18:16:36 -08006702void InstructionCodeGeneratorMIPS64::VisitRor(HRor* ror) {
6703 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00006704}
6705
Alexey Frunze4dda3372015-06-01 18:31:49 -07006706void LocationsBuilderMIPS64::VisitShl(HShl* shl) {
6707 HandleShift(shl);
6708}
6709
6710void InstructionCodeGeneratorMIPS64::VisitShl(HShl* shl) {
6711 HandleShift(shl);
6712}
6713
6714void LocationsBuilderMIPS64::VisitShr(HShr* shr) {
6715 HandleShift(shr);
6716}
6717
6718void InstructionCodeGeneratorMIPS64::VisitShr(HShr* shr) {
6719 HandleShift(shr);
6720}
6721
Alexey Frunze4dda3372015-06-01 18:31:49 -07006722void LocationsBuilderMIPS64::VisitSub(HSub* instruction) {
6723 HandleBinaryOp(instruction);
6724}
6725
6726void InstructionCodeGeneratorMIPS64::VisitSub(HSub* instruction) {
6727 HandleBinaryOp(instruction);
6728}
6729
// Static field accesses reuse the shared field-access helpers.
void LocationsBuilderMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorMIPS64::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // GetValueCanBeNull() lets the helper skip the write-barrier null filter
  // when the stored reference is known non-null.
  HandleFieldSet(instruction, instruction->GetFieldInfo(), instruction->GetValueCanBeNull());
}
6745
Calin Juravlee460d1d2015-09-29 04:52:17 +01006746void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldGet(
6747 HUnresolvedInstanceFieldGet* instruction) {
6748 FieldAccessCallingConventionMIPS64 calling_convention;
6749 codegen_->CreateUnresolvedFieldLocationSummary(
6750 instruction, instruction->GetFieldType(), calling_convention);
6751}
6752
6753void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldGet(
6754 HUnresolvedInstanceFieldGet* instruction) {
6755 FieldAccessCallingConventionMIPS64 calling_convention;
6756 codegen_->GenerateUnresolvedFieldAccess(instruction,
6757 instruction->GetFieldType(),
6758 instruction->GetFieldIndex(),
6759 instruction->GetDexPc(),
6760 calling_convention);
6761}
6762
6763void LocationsBuilderMIPS64::VisitUnresolvedInstanceFieldSet(
6764 HUnresolvedInstanceFieldSet* instruction) {
6765 FieldAccessCallingConventionMIPS64 calling_convention;
6766 codegen_->CreateUnresolvedFieldLocationSummary(
6767 instruction, instruction->GetFieldType(), calling_convention);
6768}
6769
6770void InstructionCodeGeneratorMIPS64::VisitUnresolvedInstanceFieldSet(
6771 HUnresolvedInstanceFieldSet* instruction) {
6772 FieldAccessCallingConventionMIPS64 calling_convention;
6773 codegen_->GenerateUnresolvedFieldAccess(instruction,
6774 instruction->GetFieldType(),
6775 instruction->GetFieldIndex(),
6776 instruction->GetDexPc(),
6777 calling_convention);
6778}
6779
6780void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldGet(
6781 HUnresolvedStaticFieldGet* instruction) {
6782 FieldAccessCallingConventionMIPS64 calling_convention;
6783 codegen_->CreateUnresolvedFieldLocationSummary(
6784 instruction, instruction->GetFieldType(), calling_convention);
6785}
6786
6787void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldGet(
6788 HUnresolvedStaticFieldGet* instruction) {
6789 FieldAccessCallingConventionMIPS64 calling_convention;
6790 codegen_->GenerateUnresolvedFieldAccess(instruction,
6791 instruction->GetFieldType(),
6792 instruction->GetFieldIndex(),
6793 instruction->GetDexPc(),
6794 calling_convention);
6795}
6796
6797void LocationsBuilderMIPS64::VisitUnresolvedStaticFieldSet(
6798 HUnresolvedStaticFieldSet* instruction) {
6799 FieldAccessCallingConventionMIPS64 calling_convention;
6800 codegen_->CreateUnresolvedFieldLocationSummary(
6801 instruction, instruction->GetFieldType(), calling_convention);
6802}
6803
6804void InstructionCodeGeneratorMIPS64::VisitUnresolvedStaticFieldSet(
6805 HUnresolvedStaticFieldSet* instruction) {
6806 FieldAccessCallingConventionMIPS64 calling_convention;
6807 codegen_->GenerateUnresolvedFieldAccess(instruction,
6808 instruction->GetFieldType(),
6809 instruction->GetFieldIndex(),
6810 instruction->GetDexPc(),
6811 calling_convention);
6812}
6813
Alexey Frunze4dda3372015-06-01 18:31:49 -07006814void LocationsBuilderMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006815 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6816 instruction, LocationSummary::kCallOnSlowPath);
Goran Jakovljevicd8b6a532017-04-20 11:42:30 +02006817 // In suspend check slow path, usually there are no caller-save registers at all.
6818 // If SIMD instructions are present, however, we force spilling all live SIMD
6819 // registers in full width (since the runtime only saves/restores lower part).
6820 locations->SetCustomSlowPathCallerSaves(
6821 GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006822}
6823
6824void InstructionCodeGeneratorMIPS64::VisitSuspendCheck(HSuspendCheck* instruction) {
6825 HBasicBlock* block = instruction->GetBlock();
6826 if (block->GetLoopInformation() != nullptr) {
6827 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
6828 // The back edge will generate the suspend check.
6829 return;
6830 }
6831 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
6832 // The goto will generate the suspend check.
6833 return;
6834 }
6835 GenerateSuspendCheck(instruction, nullptr);
6836}
6837
Alexey Frunze4dda3372015-06-01 18:31:49 -07006838void LocationsBuilderMIPS64::VisitThrow(HThrow* instruction) {
Vladimir Markoca6fff82017-10-03 14:49:14 +01006839 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(
6840 instruction, LocationSummary::kCallOnMainOnly);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006841 InvokeRuntimeCallingConvention calling_convention;
6842 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6843}
6844
6845void InstructionCodeGeneratorMIPS64::VisitThrow(HThrow* instruction) {
Serban Constantinescufc734082016-07-19 17:18:07 +01006846 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006847 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
6848}
6849
6850void LocationsBuilderMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006851 DataType::Type input_type = conversion->GetInputType();
6852 DataType::Type result_type = conversion->GetResultType();
Vladimir Markod5d2f2c2017-09-26 12:37:26 +01006853 DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
6854 << input_type << " -> " << result_type;
Alexey Frunze4dda3372015-06-01 18:31:49 -07006855
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006856 if ((input_type == DataType::Type::kReference) || (input_type == DataType::Type::kVoid) ||
6857 (result_type == DataType::Type::kReference) || (result_type == DataType::Type::kVoid)) {
Alexey Frunze4dda3372015-06-01 18:31:49 -07006858 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
6859 }
6860
Vladimir Markoca6fff82017-10-03 14:49:14 +01006861 LocationSummary* locations = new (GetGraph()->GetAllocator()) LocationSummary(conversion);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006862
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006863 if (DataType::IsFloatingPointType(input_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006864 locations->SetInAt(0, Location::RequiresFpuRegister());
6865 } else {
6866 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze4dda3372015-06-01 18:31:49 -07006867 }
6868
Vladimir Marko0ebe0d82017-09-21 22:50:39 +01006869 if (DataType::IsFloatingPointType(result_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006870 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006871 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08006872 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Alexey Frunze4dda3372015-06-01 18:31:49 -07006873 }
6874}
6875
// Emits code for all supported primitive conversions:
//  - integral <-> integral (masking / sign-extension),
//  - integral -> floating point (via FTMP and cvt instructions),
//  - floating point -> integral (truncating),
//  - float <-> double.
void InstructionCodeGeneratorMIPS64::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  DataType::Type result_type = conversion->GetResultType();
  DataType::Type input_type = conversion->GetInputType();

  DCHECK(!DataType::IsTypeConversionImplicit(input_type, result_type))
      << input_type << " -> " << result_type;

  if (DataType::IsIntegralType(result_type) && DataType::IsIntegralType(input_type)) {
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();

    switch (result_type) {
      case DataType::Type::kUint8:
        // Zero-extend to 8 bits.
        __ Andi(dst, src, 0xFF);
        break;
      case DataType::Type::kInt8:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seb(dst, dst);
        } else {
          __ Seb(dst, src);
        }
        break;
      case DataType::Type::kUint16:
        // Zero-extend to 16 bits.
        __ Andi(dst, src, 0xFFFF);
        break;
      case DataType::Type::kInt16:
        if (input_type == DataType::Type::kInt64) {
          // Type conversion from long to types narrower than int is a result of code
          // transformations. To avoid unpredictable results for SEB and SEH, we first
          // need to sign-extend the low 32-bit value into bits 32 through 63.
          __ Sll(dst, src, 0);
          __ Seh(dst, dst);
        } else {
          __ Seh(dst, src);
        }
        break;
      case DataType::Type::kInt32:
      case DataType::Type::kInt64:
        // Sign-extend 32-bit int into bits 32 through 63 for int-to-long and long-to-int
        // conversions, except when the input and output registers are the same and we are not
        // converting longs to shorter types. In these cases, do nothing.
        if ((input_type == DataType::Type::kInt64) || (dst != src)) {
          __ Sll(dst, src, 0);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (DataType::IsFloatingPointType(result_type) && DataType::IsIntegralType(input_type)) {
    // Integral -> floating point: move the value into the FPU scratch register,
    // then convert (64-bit moves/converts for long sources).
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    GpuRegister src = locations->InAt(0).AsRegister<GpuRegister>();
    if (input_type == DataType::Type::kInt64) {
      __ Dmtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsl(dst, FTMP);
      } else {
        __ Cvtdl(dst, FTMP);
      }
    } else {
      __ Mtc1(src, FTMP);
      if (result_type == DataType::Type::kFloat32) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (DataType::IsIntegralType(result_type) && DataType::IsFloatingPointType(input_type)) {
    CHECK(result_type == DataType::Type::kInt32 || result_type == DataType::Type::kInt64);
    // Floating point -> integral: truncate into the FPU scratch register, then
    // move the bits to the core destination register.
    GpuRegister dst = locations->Out().AsRegister<GpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();

    if (result_type == DataType::Type::kInt64) {
      if (input_type == DataType::Type::kFloat32) {
        __ TruncLS(FTMP, src);
      } else {
        __ TruncLD(FTMP, src);
      }
      __ Dmfc1(dst, FTMP);
    } else {
      if (input_type == DataType::Type::kFloat32) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);
    }
  } else if (DataType::IsFloatingPointType(result_type) &&
             DataType::IsFloatingPointType(input_type)) {
    // float <-> double.
    FpuRegister dst = locations->Out().AsFpuRegister<FpuRegister>();
    FpuRegister src = locations->InAt(0).AsFpuRegister<FpuRegister>();
    if (result_type == DataType::Type::kFloat32) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
6983
// Unsigned shift right shares the generic shift handling; XOR shares the
// generic binary-op handling.
void LocationsBuilderMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS64::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS64::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}
6999
void LocationsBuilderMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS64::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
7009
7010void LocationsBuilderMIPS64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007011 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007012}
7013
7014void InstructionCodeGeneratorMIPS64::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007015 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007016}
7017
7018void LocationsBuilderMIPS64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007019 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007020}
7021
7022void InstructionCodeGeneratorMIPS64::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007023 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007024}
7025
7026void LocationsBuilderMIPS64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007027 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007028}
7029
7030void InstructionCodeGeneratorMIPS64::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007031 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007032}
7033
7034void LocationsBuilderMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007035 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007036}
7037
7038void InstructionCodeGeneratorMIPS64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007039 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007040}
7041
7042void LocationsBuilderMIPS64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007043 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007044}
7045
7046void InstructionCodeGeneratorMIPS64::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007047 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007048}
7049
7050void LocationsBuilderMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007051 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007052}
7053
7054void InstructionCodeGeneratorMIPS64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007055 HandleCondition(comp);
Alexey Frunze4dda3372015-06-01 18:31:49 -07007056}
7057
Aart Bike9f37602015-10-09 11:15:55 -07007058void LocationsBuilderMIPS64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007059 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07007060}
7061
7062void InstructionCodeGeneratorMIPS64::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00007063 HandleCondition(comp);
Aart Bike9f37602015-10-09 11:15:55 -07007064}
7065
// Location setup for HBelowOrEqual is shared by all conditions; see HandleCondition().
void LocationsBuilderMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}
7069
// Code generation for HBelowOrEqual is shared by all conditions; see HandleCondition().
void InstructionCodeGeneratorMIPS64::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}
7073
// Location setup for HAbove is shared by all conditions; see HandleCondition().
void LocationsBuilderMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}
7077
// Code generation for HAbove is shared by all conditions; see HandleCondition().
void InstructionCodeGeneratorMIPS64::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}
7081
// Location setup for HAboveOrEqual is shared by all conditions; see HandleCondition().
void LocationsBuilderMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
7085
// Code generation for HAboveOrEqual is shared by all conditions; see HandleCondition().
void InstructionCodeGeneratorMIPS64::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
7089
Mark Mendellfe57faa2015-09-18 09:26:15 -04007090// Simple implementation of packed switch - generate cascaded compare/jumps.
7091void LocationsBuilderMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7092 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007093 new (GetGraph()->GetAllocator()) LocationSummary(switch_instr, LocationSummary::kNoCall);
Mark Mendellfe57faa2015-09-18 09:26:15 -04007094 locations->SetInAt(0, Location::RequiresRegister());
7095}
7096
// Emits a packed switch as a cascade of compare-and-branch instructions.
// `value_reg` holds the switch input, `lower_bound` is the first case value, and
// `successors` (taken from `switch_block`) hold the per-case targets in order.
// Cases are consumed two at a time: each Addiu(-2) re-biases `temp_reg` so that a
// Bltzc/Beqzc pair can test the next two case values without materializing them.
void InstructionCodeGeneratorMIPS64::GenPackedSwitchWithCompares(GpuRegister value_reg,
                                                                 int32_t lower_bound,
                                                                 uint32_t num_entries,
                                                                 HBasicBlock* switch_block,
                                                                 HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  GpuRegister temp_reg = TMP;
  // Bias the value so that the first case maps to zero in temp_reg.
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltzc(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  // Handle the remaining cases two per iteration.
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqzc(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ Bc(codegen_->GetLabelOf(default_block));
  }
}
7132
// Emits a packed switch as a jump-table dispatch: bounds-check the biased value,
// then load a 32-bit entry from the table and jump to it. Table entries are offsets
// relative to the table's own start address, so the absolute target is entry + base.
// Clobbers TMP and AT.
void InstructionCodeGeneratorMIPS64::GenTableBasedPackedSwitch(GpuRegister value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<Mips64Label*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  // The unsigned compare below also sends values < lower_bound (negative after the
  // bias) to the default block, since they wrap to large unsigned numbers.
  __ Addiu32(TMP, value_reg, -lower_bound);
  __ LoadConst32(AT, num_entries);
  __ Bgeuc(TMP, AT, codegen_->GetLabelOf(default_block));

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, table->GetLabel());
  __ Dlsa(TMP, TMP, AT, 2);  // TMP = AT + (TMP << 2): entries are 4 bytes wide.
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Daddu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ Nop();  // Fills the branch delay slot of Jr.
}
7163
7164void InstructionCodeGeneratorMIPS64::VisitPackedSwitch(HPackedSwitch* switch_instr) {
7165 int32_t lower_bound = switch_instr->GetStartValue();
7166 uint32_t num_entries = switch_instr->GetNumEntries();
7167 LocationSummary* locations = switch_instr->GetLocations();
7168 GpuRegister value_reg = locations->InAt(0).AsRegister<GpuRegister>();
7169 HBasicBlock* switch_block = switch_instr->GetBlock();
7170 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
7171
7172 if (num_entries > kPackedSwitchJumpTableThreshold) {
7173 GenTableBasedPackedSwitch(value_reg,
7174 lower_bound,
7175 num_entries,
7176 switch_block,
7177 default_block);
7178 } else {
7179 GenPackedSwitchWithCompares(value_reg,
7180 lower_bound,
7181 num_entries,
7182 switch_block,
7183 default_block);
7184 }
7185}
7186
Chris Larsenc9905a62017-03-13 17:06:18 -07007187void LocationsBuilderMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
7188 LocationSummary* locations =
Vladimir Markoca6fff82017-10-03 14:49:14 +01007189 new (GetGraph()->GetAllocator()) LocationSummary(instruction, LocationSummary::kNoCall);
Chris Larsenc9905a62017-03-13 17:06:18 -07007190 locations->SetInAt(0, Location::RequiresRegister());
7191 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00007192}
7193
Chris Larsenc9905a62017-03-13 17:06:18 -07007194void InstructionCodeGeneratorMIPS64::VisitClassTableGet(HClassTableGet* instruction) {
7195 LocationSummary* locations = instruction->GetLocations();
7196 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
7197 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
7198 instruction->GetIndex(), kMips64PointerSize).SizeValue();
7199 __ LoadFromOffset(kLoadDoubleword,
7200 locations->Out().AsRegister<GpuRegister>(),
7201 locations->InAt(0).AsRegister<GpuRegister>(),
7202 method_offset);
7203 } else {
7204 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
7205 instruction->GetIndex(), kMips64PointerSize));
7206 __ LoadFromOffset(kLoadDoubleword,
7207 locations->Out().AsRegister<GpuRegister>(),
7208 locations->InAt(0).AsRegister<GpuRegister>(),
7209 mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value());
7210 __ LoadFromOffset(kLoadDoubleword,
7211 locations->Out().AsRegister<GpuRegister>(),
7212 locations->Out().AsRegister<GpuRegister>(),
7213 method_offset);
7214 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00007215}
7216
// HIntermediateAddress must never reach this backend; abort if it does.
void LocationsBuilderMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                      ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7221
// HIntermediateAddress must never reach this backend; abort if it does.
void InstructionCodeGeneratorMIPS64::VisitIntermediateAddress(HIntermediateAddress* instruction
                                                              ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
7226
Alexey Frunze4dda3372015-06-01 18:31:49 -07007227} // namespace mips64
7228} // namespace art