/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_mips.h"

#include "arch/mips/entrypoints_direct_mips.h"
#include "arch/mips/instruction_set_features_mips.h"
#include "art_method.h"
#include "code_generator_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "gc/accounting/card_table.h"
#include "intrinsics.h"
#include "intrinsics_mips.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "offsets.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/mips/assembler_mips.h"
#include "utils/stack_checks.h"

namespace art {
namespace mips {

static constexpr int kCurrentMethodStackOffset = 0;
static constexpr Register kMethodRegisterArgument = A0;

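// Map a return type to its location: core values in V0 (the V0/V1 pair for longs),
// floating-point values in F0, and no location for void.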
Location MipsReturnLocation(Primitive::Type return_type) {
  switch (return_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      return Location::RegisterLocation(V0);

    case Primitive::kPrimLong:
      return Location::RegisterPairLocation(V0, V1);

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      return Location::FpuRegisterLocation(F0);

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
}

Location InvokeDexCallingConventionVisitorMIPS::GetReturnLocation(Primitive::Type type) const {
  return MipsReturnLocation(type);
}

Location InvokeDexCallingConventionVisitorMIPS::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}

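// Compute the location of the next method argument: core arguments go to GP registers and
// then to stack slots, longs use an aligned even/odd register pair, and floats/doubles use
// FPU registers. Stack space is reserved for every argument regardless of where it lands.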
Location InvokeDexCallingConventionVisitorMIPS::GetNextLocation(Primitive::Type type) {
  Location next_location;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t gp_index = gp_index_++;
      if (gp_index < calling_convention.GetNumberOfRegisters()) {
        next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index));
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Location::StackSlot(stack_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t gp_index = gp_index_;
      gp_index_ += 2;
      if (gp_index + 1 < calling_convention.GetNumberOfRegisters()) {
        Register reg = calling_convention.GetRegisterAt(gp_index);
        if (reg == A1 || reg == A3) {
          gp_index_++;  // Skip A1(A3), and use A2_A3(T0_T1) instead.
          gp_index++;
        }
        Register low_even = calling_convention.GetRegisterAt(gp_index);
        Register high_odd = calling_convention.GetRegisterAt(gp_index + 1);
        DCHECK_EQ(low_even + 1, high_odd);
        next_location = Location::RegisterPairLocation(low_even, high_odd);
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Location::DoubleStackSlot(stack_offset);
      }
      break;
    }

    // Note: both float and double types are stored in even FPU registers. On 32 bit FPU, double
    // will take up the even/odd pair, while floats are stored in even regs only.
    // On 64 bit FPU, both double and float are stored in even registers only.
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      uint32_t float_index = float_index_++;
      if (float_index < calling_convention.GetNumberOfFpuRegisters()) {
        next_location = Location::FpuRegisterLocation(
            calling_convention.GetFpuRegisterAt(float_index));
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                     : Location::StackSlot(stack_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;

  return next_location;
}

Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
  return MipsReturnLocation(type);
}

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()

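// Slow path for HBoundsCheck: throws ArrayIndexOutOfBoundsException (or
// StringIndexOutOfBoundsException for String.charAt) via the corresponding runtime entrypoint.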
class BoundsCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit BoundsCheckSlowPathMIPS(HBoundsCheck* instruction) : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimInt,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimInt);
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS);
};

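// Slow path for HDivZeroCheck: throws ArithmeticException via kQuickThrowDivZero.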
class DivZeroCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit DivZeroCheckSlowPathMIPS(HDivZeroCheck* instruction) : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    mips_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS);
};

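// Slow path for HLoadClass/HClinitCheck: calls the runtime to resolve or initialize the class
// and, for kBssEntry loads, stores the result into the class .bss entry.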
class LoadClassSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  LoadClassSlowPathMIPS(HLoadClass* cls,
                        HInstruction* at,
                        uint32_t dex_pc,
                        bool do_clinit,
                        const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high = nullptr)
      : SlowPathCodeMIPS(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit),
        bss_info_high_(bss_info_high) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    const bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    Register entry_address = kNoRegister;
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out.AsRegister<Register>() : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, bss_info_high_);
      bool reordering = __ SetReorder(false);
      __ Bind(&info_low->label);
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      mips_codegen->MoveLocation(out,
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    }
    RestoreLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && !baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the class entry.
      const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
      Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, info_high);
      bool reordering = __ SetReorder(false);
      mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base, info_low);
      __ StoreToOffset(kStoreWord, out.AsRegister<Register>(), TMP, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  // Pointer to the high half PC-relative patch info for HLoadClass/kBssEntry.
  const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS);
};

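// Slow path for HLoadString/kBssEntry: calls kQuickResolveString and stores the resolved
// string into the string .bss entry.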
class LoadStringSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit LoadStringSlowPathMIPS(HLoadString* instruction,
                                  const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high)
      : SlowPathCodeMIPS(instruction), bss_info_high_(bss_info_high) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    HLoadString* load = instruction_->AsLoadString();
    const dex::StringIndex string_index = load->GetStringIndex();
    Register out = locations->Out().AsRegister<Register>();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    Register entry_address = kNoRegister;
    if (baker_or_no_read_barriers) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // Store the resolved string to the BSS entry.
    if (baker_or_no_read_barriers) {
      // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, bss_info_high_);
      bool reordering = __ SetReorder(false);
      __ Bind(&info_low->label);
      __ StoreToOffset(kStoreWord,
                       calling_convention.GetRegisterAt(0),
                       entry_address,
                       /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }

    Primitive::Type type = instruction_->GetType();
    mips_codegen->MoveLocation(locations->Out(),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               type);
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved string to the BSS entry.
    if (!baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the string entry.
      const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
      Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, info_high);
      bool reordering = __ SetReorder(false);
      mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base, info_low);
      __ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
      __ SetReorder(reordering);
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS"; }

 private:
  // Pointer to the high half PC-relative patch info.
  const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS);
};

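// Slow path for HNullCheck: throws NullPointerException via kQuickThrowNullPointer.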
class NullCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit NullCheckSlowPathMIPS(HNullCheck* instr) : SlowPathCodeMIPS(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    mips_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS);
};

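// Slow path for HSuspendCheck: calls kQuickTestSuspend and then branches back either to the
// return label or to the given successor block.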
class SuspendCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  SuspendCheckSlowPathMIPS(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    mips_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    if (successor_ == nullptr) {
      __ B(GetReturnLabel());
    } else {
      __ B(mips_codegen->GetLabelOf(successor_));
    }
  }

  MipsLabel* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  MipsLabel return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS);
};

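// Slow path for HInstanceOf/HCheckCast: performs the non-trivial type check in the runtime
// (kQuickInstanceofNonTrivial / kQuickCheckInstanceOf).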
class TypeCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit TypeCheckSlowPathMIPS(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_) {
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      mips_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS);
};

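// Slow path for HDeoptimize: passes the deoptimization kind to kQuickDeoptimize, which
// transfers execution of the method to the interpreter.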
class DeoptimizationSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit DeoptimizationSlowPathMIPS(HDeoptimize* instruction)
      : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadConst32(calling_convention.GetRegisterAt(0),
                   static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    mips_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS);
};

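// Slow path for HArraySet when the store needs a runtime type check: moves the array, index
// and value into the calling convention registers and calls kQuickAputObject.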
class ArraySetSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit ArraySetSlowPathMIPS(HInstruction* instruction) : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(
        locations->InAt(0),
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        Primitive::kPrimNot,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(1),
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
        Primitive::kPrimInt,
        nullptr);
    parallel_move.AddMove(
        locations->InAt(2),
        Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
        Primitive::kPrimNot,
        nullptr);
    codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);

    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    mips_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS);
};

// Slow path marking an object reference `ref` during a read
// barrier. The field `obj.field` in the object `obj` holding this
// reference does not get updated by this slow path after marking (see
// ReadBarrierMarkAndUpdateFieldSlowPathMIPS below for that).
//
// This means that after the execution of this slow path, `ref` will
// always be up-to-date, but `obj.field` may not; i.e., after the
// flip, `ref` will be a to-space reference, but `obj.field` will
// probably still be a from-space reference (unless it gets updated by
// another thread, or if another thread installed another object
// reference (different from `ref`) in `obj.field`).
//
// If `entrypoint` is a valid location it is assumed to already be
// holding the entrypoint. The case where the entrypoint is passed in
// is for the GcRoot read barrier.
class ReadBarrierMarkSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierMarkSlowPathMIPS(HInstruction* instruction,
                              Location ref,
                              Location entrypoint = Location::NoLocation())
      : SlowPathCodeMIPS(instruction), ref_(ref), entrypoint_(entrypoint) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == FP)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      mips_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      DCHECK_EQ(entrypoint_.AsRegister<Register>(), T9);
      __ Jalr(entrypoint_.AsRegister<Register>());
      __ NopIfNoReordering();
    } else {
      int32_t entry_point_offset =
          CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
      // This runtime call does not require a stack map.
      mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this,
                                                        /* direct */ false);
    }
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if already loaded.
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS);
};

// Slow path marking an object reference `ref` during a read barrier,
// and if needed, atomically updating the field `obj.field` in the
// object `obj` holding this reference after marking (contrary to
// ReadBarrierMarkSlowPathMIPS above, which never tries to update
// `obj.field`).
//
// This means that after the execution of this slow path, both `ref`
// and `obj.field` will be up-to-date; i.e., after the flip, both will
// hold the same to-space reference (unless another thread installed
// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS(HInstruction* instruction,
                                            Location ref,
                                            Register obj,
                                            Location field_offset,
                                            Register temp1)
      : SlowPathCodeMIPS(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegisterPair()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == FP)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                      instruction_,
                                                      this,
                                                      /* direct */ false);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    MipsLabel done;
    __ Beq(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates it before us, but it's OK. This is achieved
    // using a strong compare-and-set (CAS) operation with relaxed
    // memory synchronization ordering, where the expected value is
    // the old reference and the desired value is the new reference.

    // Convenience aliases.
    Register base = obj_;
    // The UnsafeCASObject intrinsic uses a register pair as field
    // offset ("long offset"), of which only the low part contains
    // data.
    Register offset = field_offset_.AsRegisterPairLow<Register>();
    Register expected = temp1_;
    Register value = ref_reg;
    Register tmp_ptr = TMP;      // Pointer to actual memory.
    Register tmp = AT;           // Value in memory.

    __ Addu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    bool is_r6 = mips_codegen->GetInstructionSetFeatures().IsR6();
    MipsLabel loop_head, exit_loop;
    __ Bind(&loop_head);
    if (is_r6) {
      __ LlR6(tmp, tmp_ptr);
    } else {
      __ LlR2(tmp, tmp_ptr);
    }
    __ Bne(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    if (is_r6) {
      __ ScR6(tmp, tmp_ptr);
    } else {
      __ ScR2(tmp, tmp_ptr);
    }
    __ Beqz(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  const Register temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS);
};

// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierForHeapReferenceSlowPathMIPS(HInstruction* instruction,
                                          Location out,
                                          Location ref,
                                          Location obj,
                                          uint32_t offset,
                                          Location index)
      : SlowPathCodeMIPS(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips::MipsAssembler::Sll and
          // art::mips::MipsAssembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    mips_codegen->Move32(out_, calling_convention.GetReturnLocation(Primitive::kPrimNot));

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathMIPS"; }

 private:
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  const Location out_;
  const Location ref_;
  const Location obj_;
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS);
};

// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierForRootSlowPathMIPS(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS(instruction), out_(out), root_(root) {
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    mips_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), root_);
    mips_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    mips_codegen->Move32(out_, calling_convention.GetReturnLocation(Primitive::kPrimNot));

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS"; }

 private:
  const Location out_;
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS);
};

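// MIPS32 code generator construction: records the callee-save register masks and sets up the
// PC-relative patch tables (methods, types, strings, JIT literals) used by the load kinds above.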
CodeGeneratorMIPS::CodeGeneratorMIPS(HGraph* graph,
                                     const MipsInstructionSetFeatures& isa_features,
                                     const CompilerOptions& compiler_options,
                                     OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfFRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena(), &isa_features),
      isa_features_(isa_features),
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      clobbered_ra_(false) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}

#undef __
// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<MipsAssembler*>(GetAssembler())->  // NOLINT
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()

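// Finalizing the assembler may rewrite branches into longer sequences, shifting instruction
// offsets; the recorded native pc offsets below are remapped through GetAdjustedPosition().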
void CodeGeneratorMIPS::Finalize(CodeAllocator* allocator) {
  // Ensure that we fix up branches.
  __ FinalizeCode();

  // Adjust native pc offsets in stack maps.
  for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
    uint32_t old_position =
        stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips);
    uint32_t new_position = __ GetAdjustedPosition(old_position);
    DCHECK_GE(new_position, old_position);
    stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
  }

  // Adjust pc offsets for the disassembly information.
  if (disasm_info_ != nullptr) {
    GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
    frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
    frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
    for (auto& it : *disasm_info_->GetInstructionIntervals()) {
      it.second.start = __ GetAdjustedPosition(it.second.start);
      it.second.end = __ GetAdjustedPosition(it.second.end);
    }
    for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
      it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
      it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
    }
  }

  CodeGenerator::Finalize(allocator);
}

MipsAssembler* ParallelMoveResolverMIPS::GetAssembler() const {
  return codegen_->GetAssembler();
}

void ParallelMoveResolverMIPS::EmitMove(size_t index) {
  DCHECK_LT(index, moves_.size());
  MoveOperands* move = moves_[index];
  codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
}

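// Swap the contents of two arbitrary locations, using TMP, AT and FTMP as scratch registers;
// handles GPR, FPR, register-pair and stack-slot operands in all supported combinations.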
1152void ParallelMoveResolverMIPS::EmitSwap(size_t index) {
1153 DCHECK_LT(index, moves_.size());
1154 MoveOperands* move = moves_[index];
1155 Primitive::Type type = move->GetType();
1156 Location loc1 = move->GetDestination();
1157 Location loc2 = move->GetSource();
1158
1159 DCHECK(!loc1.IsConstant());
1160 DCHECK(!loc2.IsConstant());
1161
1162 if (loc1.Equals(loc2)) {
1163 return;
1164 }
1165
1166 if (loc1.IsRegister() && loc2.IsRegister()) {
1167 // Swap 2 GPRs.
1168 Register r1 = loc1.AsRegister<Register>();
1169 Register r2 = loc2.AsRegister<Register>();
1170 __ Move(TMP, r2);
1171 __ Move(r2, r1);
1172 __ Move(r1, TMP);
1173 } else if (loc1.IsFpuRegister() && loc2.IsFpuRegister()) {
1174 FRegister f1 = loc1.AsFpuRegister<FRegister>();
1175 FRegister f2 = loc2.AsFpuRegister<FRegister>();
1176 if (type == Primitive::kPrimFloat) {
1177 __ MovS(FTMP, f2);
1178 __ MovS(f2, f1);
1179 __ MovS(f1, FTMP);
1180 } else {
1181 DCHECK_EQ(type, Primitive::kPrimDouble);
1182 __ MovD(FTMP, f2);
1183 __ MovD(f2, f1);
1184 __ MovD(f1, FTMP);
1185 }
1186 } else if ((loc1.IsRegister() && loc2.IsFpuRegister()) ||
1187 (loc1.IsFpuRegister() && loc2.IsRegister())) {
1188 // Swap FPR and GPR.
1189 DCHECK_EQ(type, Primitive::kPrimFloat); // Can only swap a float.
1190 FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
1191 : loc2.AsFpuRegister<FRegister>();
Goran Jakovljevic35dfcaa2016-09-22 09:26:01 +02001192 Register r2 = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001193 __ Move(TMP, r2);
1194 __ Mfc1(r2, f1);
1195 __ Mtc1(TMP, f1);
1196 } else if (loc1.IsRegisterPair() && loc2.IsRegisterPair()) {
1197 // Swap 2 GPR register pairs.
1198 Register r1 = loc1.AsRegisterPairLow<Register>();
1199 Register r2 = loc2.AsRegisterPairLow<Register>();
1200 __ Move(TMP, r2);
1201 __ Move(r2, r1);
1202 __ Move(r1, TMP);
1203 r1 = loc1.AsRegisterPairHigh<Register>();
1204 r2 = loc2.AsRegisterPairHigh<Register>();
1205 __ Move(TMP, r2);
1206 __ Move(r2, r1);
1207 __ Move(r1, TMP);
1208 } else if ((loc1.IsRegisterPair() && loc2.IsFpuRegister()) ||
1209 (loc1.IsFpuRegister() && loc2.IsRegisterPair())) {
1210 // Swap FPR and GPR register pair.
1211 DCHECK_EQ(type, Primitive::kPrimDouble);
1212 FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
1213 : loc2.AsFpuRegister<FRegister>();
1214 Register r2_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
1215 : loc2.AsRegisterPairLow<Register>();
1216 Register r2_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
1217 : loc2.AsRegisterPairHigh<Register>();
1218 // Use 2 temporary registers because we can't first swap the low 32 bits of an FPR and
1219 // then swap the high 32 bits of the same FPR. mtc1 makes the high 32 bits of an FPR
1220 // unpredictable, so the following mfhc1 would not read back what was written.
1221 __ Mfc1(TMP, f1);
Alexey Frunzebb9863a2016-01-11 15:51:16 -08001222 __ MoveFromFpuHigh(AT, f1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001223 __ Mtc1(r2_l, f1);
Alexey Frunzebb9863a2016-01-11 15:51:16 -08001224 __ MoveToFpuHigh(r2_h, f1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001225 __ Move(r2_l, TMP);
1226 __ Move(r2_h, AT);
1227 } else if (loc1.IsStackSlot() && loc2.IsStackSlot()) {
1228 Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ false);
1229 } else if (loc1.IsDoubleStackSlot() && loc2.IsDoubleStackSlot()) {
1230 Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ true);
David Brazdilcc0f3112016-01-28 17:14:52 +00001231 } else if ((loc1.IsRegister() && loc2.IsStackSlot()) ||
1232 (loc1.IsStackSlot() && loc2.IsRegister())) {
Goran Jakovljevic35dfcaa2016-09-22 09:26:01 +02001233 Register reg = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
1234 intptr_t offset = loc1.IsStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
David Brazdilcc0f3112016-01-28 17:14:52 +00001235 __ Move(TMP, reg);
1236 __ LoadFromOffset(kLoadWord, reg, SP, offset);
1237 __ StoreToOffset(kStoreWord, TMP, SP, offset);
1238 } else if ((loc1.IsRegisterPair() && loc2.IsDoubleStackSlot()) ||
1239 (loc1.IsDoubleStackSlot() && loc2.IsRegisterPair())) {
1240 Register reg_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
1241 : loc2.AsRegisterPairLow<Register>();
1242 Register reg_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
1243 : loc2.AsRegisterPairHigh<Register>();
Goran Jakovljevic35dfcaa2016-09-22 09:26:01 +02001244 intptr_t offset_l = loc1.IsDoubleStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
David Brazdilcc0f3112016-01-28 17:14:52 +00001245 intptr_t offset_h = loc1.IsDoubleStackSlot() ? loc1.GetHighStackIndex(kMipsWordSize)
1246 : loc2.GetHighStackIndex(kMipsWordSize);
1247 __ Move(TMP, reg_l);
David Brazdilcc0f3112016-01-28 17:14:52 +00001248 __ LoadFromOffset(kLoadWord, reg_l, SP, offset_l);
David Brazdilcc0f3112016-01-28 17:14:52 +00001249 __ StoreToOffset(kStoreWord, TMP, SP, offset_l);
David Brazdil04d3e872016-01-29 09:50:09 +00001250 __ Move(TMP, reg_h);
1251 __ LoadFromOffset(kLoadWord, reg_h, SP, offset_h);
1252 __ StoreToOffset(kStoreWord, TMP, SP, offset_h);
Goran Jakovljevic35dfcaa2016-09-22 09:26:01 +02001253 } else if (loc1.IsFpuRegister() || loc2.IsFpuRegister()) {
1254 FRegister reg = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
1255 : loc2.AsFpuRegister<FRegister>();
1256 intptr_t offset = loc1.IsFpuRegister() ? loc2.GetStackIndex() : loc1.GetStackIndex();
1257 if (type == Primitive::kPrimFloat) {
1258 __ MovS(FTMP, reg);
1259 __ LoadSFromOffset(reg, SP, offset);
1260 __ StoreSToOffset(FTMP, SP, offset);
1261 } else {
1262 DCHECK_EQ(type, Primitive::kPrimDouble);
1263 __ MovD(FTMP, reg);
1264 __ LoadDFromOffset(reg, SP, offset);
1265 __ StoreDToOffset(FTMP, SP, offset);
1266 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001267 } else {
1268 LOG(FATAL) << "Swap between " << loc1 << " and " << loc2 << " is unsupported";
1269 }
1270}
1271
1272void ParallelMoveResolverMIPS::RestoreScratch(int reg) {
1273 __ Pop(static_cast<Register>(reg));
1274}
1275
1276void ParallelMoveResolverMIPS::SpillScratch(int reg) {
1277 __ Push(static_cast<Register>(reg));
1278}
1279
1280void ParallelMoveResolverMIPS::Exchange(int index1, int index2, bool double_slot) {
1281 // Allocate a scratch register other than TMP, if available.
1282 // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
1283 // automatically unspilled when the scratch scope object is destroyed).
1284 ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
1285 // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
1286 int stack_offset = ensure_scratch.IsSpilled() ? kMipsWordSize : 0;
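// One word is swapped per loop iteration, using the scratch register and TMP; a double
// slot simply runs the loop twice, advancing both stack offsets by a word for the high half.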
1287 for (int i = 0; i <= (double_slot ? 1 : 0); i++, stack_offset += kMipsWordSize) {
1288 __ LoadFromOffset(kLoadWord,
1289 Register(ensure_scratch.GetRegister()),
1290 SP,
1291 index1 + stack_offset);
1292 __ LoadFromOffset(kLoadWord,
1293 TMP,
1294 SP,
1295 index2 + stack_offset);
1296 __ StoreToOffset(kStoreWord,
1297 Register(ensure_scratch.GetRegister()),
1298 SP,
1299 index2 + stack_offset);
1300 __ StoreToOffset(kStoreWord, TMP, SP, index1 + stack_offset);
1301 }
1302}
1303
Alexey Frunze73296a72016-06-03 22:51:46 -07001304void CodeGeneratorMIPS::ComputeSpillMask() {
1305 core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
1306 fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
1307 DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
1308 // If there are FPU callee-saved registers and an odd number of GPR callee-saved
1309 // registers, include the ZERO register to force alignment of FPU callee-saved registers
1310 // within the stack frame.
1311 if ((fpu_spill_mask_ != 0) && (POPCOUNT(core_spill_mask_) % 2 != 0)) {
1312 core_spill_mask_ |= (1 << ZERO);
1313 }
Alexey Frunze58320ce2016-08-30 21:40:46 -07001314}
1315
1316bool CodeGeneratorMIPS::HasAllocatedCalleeSaveRegisters() const {
Alexey Frunze06a46c42016-07-19 15:00:40 -07001317 // If RA is clobbered by PC-relative operations on R2 and it's the only spilled register
Alexey Frunze58320ce2016-08-30 21:40:46 -07001318 // (this can happen in leaf methods), force CodeGenerator::InitializeCodeGeneration()
1319 // into the path that creates a stack frame so that RA can be explicitly saved and restored.
1320 // RA can't otherwise be saved/restored when it's the only spilled register.
Alexey Frunze58320ce2016-08-30 21:40:46 -07001321 return CodeGenerator::HasAllocatedCalleeSaveRegisters() || clobbered_ra_;
Alexey Frunze73296a72016-06-03 22:51:46 -07001322}
1323
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001324static dwarf::Reg DWARFReg(Register reg) {
1325 return dwarf::Reg::MipsCore(static_cast<int>(reg));
1326}
1327
1328// TODO: mapping of floating-point registers to DWARF.
1329
1330void CodeGeneratorMIPS::GenerateFrameEntry() {
1331 __ Bind(&frame_entry_label_);
1332
1333 bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips) || !IsLeafMethod();
1334
1335 if (do_overflow_check) {
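// Implicit stack-overflow check: load a word from below SP by the reserved amount and
// discard it into ZERO, so an overflowing frame faults right here, where the
// RecordPcInfo call below has associated a stack map with the faulting PC.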
1336 __ LoadFromOffset(kLoadWord,
1337 ZERO,
1338 SP,
1339 -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips)));
1340 RecordPcInfo(nullptr, 0);
1341 }
1342
1343 if (HasEmptyFrame()) {
Alexey Frunze58320ce2016-08-30 21:40:46 -07001344 CHECK_EQ(fpu_spill_mask_, 0u);
1345 CHECK_EQ(core_spill_mask_, 1u << RA);
1346 CHECK(!clobbered_ra_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001347 return;
1348 }
1349
1350 // Make sure the frame size isn't unreasonably large.
1351 if (GetFrameSize() > GetStackOverflowReservedBytes(kMips)) {
1352 LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips) << " bytes";
1353 }
1354
1355 // Spill callee-saved registers.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001356
Alexey Frunze73296a72016-06-03 22:51:46 -07001357 uint32_t ofs = GetFrameSize();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001358 __ IncreaseFrameSize(ofs);
1359
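// Registers are stored from the highest-numbered one down, at decreasing offsets from
// the top of the just-allocated frame.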
Alexey Frunze73296a72016-06-03 22:51:46 -07001360 for (uint32_t mask = core_spill_mask_; mask != 0; ) {
1361 Register reg = static_cast<Register>(MostSignificantBit(mask));
1362 mask ^= 1u << reg;
1363 ofs -= kMipsWordSize;
1364 // The ZERO register is only included for alignment.
1365 if (reg != ZERO) {
1366 __ StoreToOffset(kStoreWord, reg, SP, ofs);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001367 __ cfi().RelOffset(DWARFReg(reg), ofs);
1368 }
1369 }
1370
Alexey Frunze73296a72016-06-03 22:51:46 -07001371 for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
1372 FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
1373 mask ^= 1u << reg;
1374 ofs -= kMipsDoublewordSize;
1375 __ StoreDToOffset(reg, SP, ofs);
1376 // TODO: __ cfi().RelOffset(DWARFReg(reg), ofs);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001377 }
1378
Nicolas Geoffray96eeb4e2016-10-12 22:03:31 +01001379 // Save the current method if we need it. Note that we do not
1380 // do this in HCurrentMethod, as the instruction might have been removed
1381 // in the SSA graph.
1382 if (RequiresCurrentMethod()) {
1383 __ StoreToOffset(kStoreWord, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
1384 }
Goran Jakovljevicc6418422016-12-05 16:31:55 +01001385
1386 if (GetGraph()->HasShouldDeoptimizeFlag()) {
1387 // Initialize should deoptimize flag to 0.
1388 __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
1389 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001390}
1391
1392void CodeGeneratorMIPS::GenerateFrameExit() {
1393 __ cfi().RememberState();
1394
1395 if (!HasEmptyFrame()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001396 // Restore callee-saved registers.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001397
Alexey Frunze73296a72016-06-03 22:51:46 -07001398 // For better instruction scheduling, restore RA before the other registers.
1399 uint32_t ofs = GetFrameSize();
1400 for (uint32_t mask = core_spill_mask_; mask != 0; ) {
1401 Register reg = static_cast<Register>(MostSignificantBit(mask));
1402 mask ^= 1u << reg;
1403 ofs -= kMipsWordSize;
1404 // The ZERO register is only included for alignment.
1405 if (reg != ZERO) {
1406 __ LoadFromOffset(kLoadWord, reg, SP, ofs);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001407 __ cfi().Restore(DWARFReg(reg));
1408 }
1409 }
1410
Alexey Frunze73296a72016-06-03 22:51:46 -07001411 for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
1412 FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
1413 mask ^= 1u << reg;
1414 ofs -= kMipsDoublewordSize;
1415 __ LoadDFromOffset(reg, SP, ofs);
1416 // TODO: __ cfi().Restore(DWARFReg(reg));
1417 }
1418
Alexey Frunze57eb0f52016-07-29 22:04:46 -07001419 size_t frame_size = GetFrameSize();
1420 // Adjust the stack pointer in the delay slot if doing so doesn't break CFI.
1421 bool exchange = IsInt<16>(static_cast<int32_t>(frame_size));
1422 bool reordering = __ SetReorder(false);
1423 if (exchange) {
1424 __ Jr(RA);
1425 __ DecreaseFrameSize(frame_size); // Single instruction in delay slot.
1426 } else {
1427 __ DecreaseFrameSize(frame_size);
1428 __ Jr(RA);
1429 __ Nop(); // In delay slot.
1430 }
1431 __ SetReorder(reordering);
1432 } else {
1433 __ Jr(RA);
1434 __ NopIfNoReordering();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001435 }
1436
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001437 __ cfi().RestoreState();
1438 __ cfi().DefCFAOffset(GetFrameSize());
1439}
1440
1441void CodeGeneratorMIPS::Bind(HBasicBlock* block) {
1442 __ Bind(GetLabelOf(block));
1443}
1444
1445void CodeGeneratorMIPS::MoveLocation(Location dst, Location src, Primitive::Type dst_type) {
1446 if (src.Equals(dst)) {
1447 return;
1448 }
1449
1450 if (src.IsConstant()) {
1451 MoveConstant(dst, src.GetConstant());
1452 } else {
1453 if (Primitive::Is64BitType(dst_type)) {
1454 Move64(dst, src);
1455 } else {
1456 Move32(dst, src);
1457 }
1458 }
1459}
1460
1461void CodeGeneratorMIPS::Move32(Location destination, Location source) {
1462 if (source.Equals(destination)) {
1463 return;
1464 }
1465
1466 if (destination.IsRegister()) {
1467 if (source.IsRegister()) {
1468 __ Move(destination.AsRegister<Register>(), source.AsRegister<Register>());
1469 } else if (source.IsFpuRegister()) {
1470 __ Mfc1(destination.AsRegister<Register>(), source.AsFpuRegister<FRegister>());
1471 } else {
1472 DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
1473 __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
1474 }
1475 } else if (destination.IsFpuRegister()) {
1476 if (source.IsRegister()) {
1477 __ Mtc1(source.AsRegister<Register>(), destination.AsFpuRegister<FRegister>());
1478 } else if (source.IsFpuRegister()) {
1479 __ MovS(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
1480 } else {
1481 DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
1482 __ LoadSFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
1483 }
1484 } else {
1485 DCHECK(destination.IsStackSlot()) << destination;
1486 if (source.IsRegister()) {
1487 __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
1488 } else if (source.IsFpuRegister()) {
1489 __ StoreSToOffset(source.AsFpuRegister<FRegister>(), SP, destination.GetStackIndex());
1490 } else {
1491 DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
1492 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1493 __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex());
1494 }
1495 }
1496}
1497
1498void CodeGeneratorMIPS::Move64(Location destination, Location source) {
1499 if (source.Equals(destination)) {
1500 return;
1501 }
1502
1503 if (destination.IsRegisterPair()) {
1504 if (source.IsRegisterPair()) {
1505 __ Move(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
1506 __ Move(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
1507 } else if (source.IsFpuRegister()) {
1508 Register dst_high = destination.AsRegisterPairHigh<Register>();
1509 Register dst_low = destination.AsRegisterPairLow<Register>();
1510 FRegister src = source.AsFpuRegister<FRegister>();
1511 __ Mfc1(dst_low, src);
Alexey Frunzebb9863a2016-01-11 15:51:16 -08001512 __ MoveFromFpuHigh(dst_high, src);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001513 } else {
1514 DCHECK(source.IsDoubleStackSlot()) << "Cannot move from " << source << " to " << destination;
1515 int32_t off = source.GetStackIndex();
1516 Register r = destination.AsRegisterPairLow<Register>();
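// A kLoadDoubleword into a core register fills the whole destination pair; the
// assembler is expected to expand it into two word loads (low word, then high word).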
1517 __ LoadFromOffset(kLoadDoubleword, r, SP, off);
1518 }
1519 } else if (destination.IsFpuRegister()) {
1520 if (source.IsRegisterPair()) {
1521 FRegister dst = destination.AsFpuRegister<FRegister>();
1522 Register src_high = source.AsRegisterPairHigh<Register>();
1523 Register src_low = source.AsRegisterPairLow<Register>();
1524 __ Mtc1(src_low, dst);
Alexey Frunzebb9863a2016-01-11 15:51:16 -08001525 __ MoveToFpuHigh(src_high, dst);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001526 } else if (source.IsFpuRegister()) {
1527 __ MovD(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
1528 } else {
1529 DCHECK(source.IsDoubleStackSlot()) << "Cannot move from " << source << " to " << destination;
1530 __ LoadDFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
1531 }
1532 } else {
1533 DCHECK(destination.IsDoubleStackSlot()) << destination;
1534 int32_t off = destination.GetStackIndex();
1535 if (source.IsRegisterPair()) {
1536 __ StoreToOffset(kStoreDoubleword, source.AsRegisterPairLow<Register>(), SP, off);
1537 } else if (source.IsFpuRegister()) {
1538 __ StoreDToOffset(source.AsFpuRegister<FRegister>(), SP, off);
1539 } else {
1540 DCHECK(source.IsDoubleStackSlot()) << "Cannot move from " << source << " to " << destination;
1541 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
1542 __ StoreToOffset(kStoreWord, TMP, SP, off);
1543 __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex() + 4);
1544 __ StoreToOffset(kStoreWord, TMP, SP, off + 4);
1545 }
1546 }
1547}
1548
1549void CodeGeneratorMIPS::MoveConstant(Location destination, HConstant* c) {
1550 if (c->IsIntConstant() || c->IsNullConstant()) {
1551 // Move 32 bit constant.
1552 int32_t value = GetInt32ValueOf(c);
1553 if (destination.IsRegister()) {
1554 Register dst = destination.AsRegister<Register>();
1555 __ LoadConst32(dst, value);
1556 } else {
1557 DCHECK(destination.IsStackSlot())
1558 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001559 __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001560 }
1561 } else if (c->IsLongConstant()) {
1562 // Move 64 bit constant.
1563 int64_t value = GetInt64ValueOf(c);
1564 if (destination.IsRegisterPair()) {
1565 Register r_h = destination.AsRegisterPairHigh<Register>();
1566 Register r_l = destination.AsRegisterPairLow<Register>();
1567 __ LoadConst64(r_h, r_l, value);
1568 } else {
1569 DCHECK(destination.IsDoubleStackSlot())
1570 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001571 __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001572 }
1573 } else if (c->IsFloatConstant()) {
1574 // Move 32 bit float constant.
1575 int32_t value = GetInt32ValueOf(c);
1576 if (destination.IsFpuRegister()) {
1577 __ LoadSConst32(destination.AsFpuRegister<FRegister>(), value, TMP);
1578 } else {
1579 DCHECK(destination.IsStackSlot())
1580 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001581 __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001582 }
1583 } else {
1584 // Move 64 bit double constant.
1585 DCHECK(c->IsDoubleConstant()) << c->DebugName();
1586 int64_t value = GetInt64ValueOf(c);
1587 if (destination.IsFpuRegister()) {
1588 FRegister fd = destination.AsFpuRegister<FRegister>();
1589 __ LoadDConst64(fd, value, TMP);
1590 } else {
1591 DCHECK(destination.IsDoubleStackSlot())
1592 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001593 __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001594 }
1595 }
1596}
1597
1598void CodeGeneratorMIPS::MoveConstant(Location destination, int32_t value) {
1599 DCHECK(destination.IsRegister());
1600 Register dst = destination.AsRegister<Register>();
1601 __ LoadConst32(dst, value);
1602}
1603
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001604void CodeGeneratorMIPS::AddLocationAsTemp(Location location, LocationSummary* locations) {
1605 if (location.IsRegister()) {
1606 locations->AddTemp(location);
Alexey Frunzec9e94f32015-10-26 16:11:39 -07001607 } else if (location.IsRegisterPair()) {
1608 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1609 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001610 } else {
1611 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1612 }
1613}
1614
Vladimir Markoaad75c62016-10-03 08:46:48 +00001615template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
1616inline void CodeGeneratorMIPS::EmitPcRelativeLinkerPatches(
1617 const ArenaDeque<PcRelativePatchInfo>& infos,
1618 ArenaVector<LinkerPatch>* linker_patches) {
1619 for (const PcRelativePatchInfo& info : infos) {
1620 const DexFile& dex_file = info.target_dex_file;
1621 size_t offset_or_index = info.offset_or_index;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001622 DCHECK(info.label.IsBound());
1623 uint32_t literal_offset = __ GetLabelLocation(&info.label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001624 // On R2 we use HMipsComputeBaseMethodAddress and patch relative to
1625 // the assembler's base label used for PC-relative addressing.
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001626 const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
1627 uint32_t pc_rel_offset = info_high.pc_rel_label.IsBound()
1628 ? __ GetLabelLocation(&info_high.pc_rel_label)
Vladimir Markoaad75c62016-10-03 08:46:48 +00001629 : __ GetPcRelBaseLabelLocation();
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001630 linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
Vladimir Markoaad75c62016-10-03 08:46:48 +00001631 }
1632}
1633
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001634void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
1635 DCHECK(linker_patches->empty());
1636 size_t size =
Vladimir Marko65979462017-05-19 17:25:12 +01001637 pc_relative_method_patches_.size() +
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001638 method_bss_entry_patches_.size() +
Alexey Frunze06a46c42016-07-19 15:00:40 -07001639 pc_relative_type_patches_.size() +
Vladimir Marko65979462017-05-19 17:25:12 +01001640 type_bss_entry_patches_.size() +
1641 pc_relative_string_patches_.size();
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001642 linker_patches->reserve(size);
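// Boot-image compilations emit direct PC-relative method/type/string patches; other
// compilations load strings through .bss entries instead. Method and type .bss-entry
// patches are emitted in both cases.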
Vladimir Marko65979462017-05-19 17:25:12 +01001643 if (GetCompilerOptions().IsBootImage()) {
1644 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
Vladimir Markoaad75c62016-10-03 08:46:48 +00001645 linker_patches);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00001646 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
1647 linker_patches);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001648 EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
1649 linker_patches);
Vladimir Marko65979462017-05-19 17:25:12 +01001650 } else {
1651 DCHECK(pc_relative_method_patches_.empty());
1652 DCHECK(pc_relative_type_patches_.empty());
1653 EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
1654 linker_patches);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001655 }
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001656 EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
1657 linker_patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001658 EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
1659 linker_patches);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001660 DCHECK_EQ(size, linker_patches->size());
Alexey Frunze06a46c42016-07-19 15:00:40 -07001661}
1662
Vladimir Marko65979462017-05-19 17:25:12 +01001663CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001664 MethodReference target_method,
1665 const PcRelativePatchInfo* info_high) {
Vladimir Marko65979462017-05-19 17:25:12 +01001666 return NewPcRelativePatch(*target_method.dex_file,
1667 target_method.dex_method_index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001668 info_high,
Vladimir Marko65979462017-05-19 17:25:12 +01001669 &pc_relative_method_patches_);
Alexey Frunze06a46c42016-07-19 15:00:40 -07001670}
1671
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001672CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001673 MethodReference target_method,
1674 const PcRelativePatchInfo* info_high) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001675 return NewPcRelativePatch(*target_method.dex_file,
1676 target_method.dex_method_index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001677 info_high,
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001678 &method_bss_entry_patches_);
1679}
1680
Alexey Frunze06a46c42016-07-19 15:00:40 -07001681CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001682 const DexFile& dex_file,
1683 dex::TypeIndex type_index,
1684 const PcRelativePatchInfo* info_high) {
1685 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001686}
1687
Vladimir Marko1998cd02017-01-13 13:02:58 +00001688CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001689 const DexFile& dex_file,
1690 dex::TypeIndex type_index,
1691 const PcRelativePatchInfo* info_high) {
1692 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001693}
1694
Vladimir Marko65979462017-05-19 17:25:12 +01001695CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001696 const DexFile& dex_file,
1697 dex::StringIndex string_index,
1698 const PcRelativePatchInfo* info_high) {
1699 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001700}
1701
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001702CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001703 const DexFile& dex_file,
1704 uint32_t offset_or_index,
1705 const PcRelativePatchInfo* info_high,
1706 ArenaDeque<PcRelativePatchInfo>* patches) {
1707 patches->emplace_back(dex_file, offset_or_index, info_high);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001708 return &patches->back();
1709}
1710
Alexey Frunze06a46c42016-07-19 15:00:40 -07001711Literal* CodeGeneratorMIPS::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1712 return map->GetOrCreate(
1713 value,
1714 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1715}
1716
Alexey Frunze06a46c42016-07-19 15:00:40 -07001717Literal* CodeGeneratorMIPS::DeduplicateBootImageAddressLiteral(uint32_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001718 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunze06a46c42016-07-19 15:00:40 -07001719}
1720
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001721void CodeGeneratorMIPS::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
Alexey Frunze6b892cd2017-01-03 17:11:38 -08001722 Register out,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001723 Register base,
1724 PcRelativePatchInfo* info_low) {
1725 DCHECK(!info_high->patch_info_high);
Alexey Frunze6079dca2017-05-28 19:10:28 -07001726 DCHECK_NE(out, base);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001727 if (GetInstructionSetFeatures().IsR6()) {
1728 DCHECK_EQ(base, ZERO);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001729 __ Bind(&info_high->label);
1730 __ Bind(&info_high->pc_rel_label);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08001731 // Add the high half of a 32-bit offset to PC.
Vladimir Markoaad75c62016-10-03 08:46:48 +00001732 __ Auipc(out, /* placeholder */ 0x1234);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001733 } else {
1734 // If base is ZERO, emit NAL to obtain the actual base.
1735 if (base == ZERO) {
1736 // Generate a dummy PC-relative call to obtain PC.
1737 __ Nal();
1738 }
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001739 __ Bind(&info_high->label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001740 __ Lui(out, /* placeholder */ 0x1234);
1741 // If we emitted the NAL, bind the pc_rel_label, otherwise base is a register holding
1742 // the HMipsComputeBaseMethodAddress which has its own label stored in MipsAssembler.
1743 if (base == ZERO) {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001744 __ Bind(&info_high->pc_rel_label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001745 }
Alexey Frunze6b892cd2017-01-03 17:11:38 -08001746 // Add the high half of a 32-bit offset to PC.
Vladimir Markoaad75c62016-10-03 08:46:48 +00001747 __ Addu(out, out, (base == ZERO) ? RA : base);
1748 }
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001749 // A following instruction will add the sign-extended low half of the 32-bit
Alexey Frunze6b892cd2017-01-03 17:11:38 -08001750 // offset to `out` (e.g. lw, jialc, addiu).
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001751 DCHECK_EQ(info_low->patch_info_high, info_high);
1752 __ Bind(&info_low->label);
Vladimir Markoaad75c62016-10-03 08:46:48 +00001753}
1754
Alexey Frunze627c1a02017-01-30 19:28:14 -08001755CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootStringPatch(
1756 const DexFile& dex_file,
1757 dex::StringIndex dex_index,
1758 Handle<mirror::String> handle) {
1759 jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index),
1760 reinterpret_cast64<uint64_t>(handle.GetReference()));
1761 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
1762 return &jit_string_patches_.back();
1763}
1764
1765CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootClassPatch(
1766 const DexFile& dex_file,
1767 dex::TypeIndex dex_index,
1768 Handle<mirror::Class> handle) {
1769 jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index),
1770 reinterpret_cast64<uint64_t>(handle.GetReference()));
1771 jit_class_patches_.emplace_back(dex_file, dex_index.index_);
1772 return &jit_class_patches_.back();
1773}
1774
1775void CodeGeneratorMIPS::PatchJitRootUse(uint8_t* code,
1776 const uint8_t* roots_data,
1777 const CodeGeneratorMIPS::JitPatchInfo& info,
1778 uint64_t index_in_table) const {
1779 uint32_t literal_offset = GetAssembler().GetLabelLocation(&info.high_label);
1780 uintptr_t address =
1781 reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
1782 uint32_t addr32 = dchecked_integral_cast<uint32_t>(address);
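// The DCHECKs below verify the placeholder immediates emitted earlier (0x1234 in the
// lui, 0x5678 in the following instruction); the byte indices reflect the little-endian
// layout of the code buffer. The real address is then patched over them in place.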
1783 // lui reg, addr32_high
1784 DCHECK_EQ(code[literal_offset + 0], 0x34);
1785 DCHECK_EQ(code[literal_offset + 1], 0x12);
1786 DCHECK_EQ((code[literal_offset + 2] & 0xE0), 0x00);
1787 DCHECK_EQ(code[literal_offset + 3], 0x3C);
Alexey Frunzec61c0762017-04-10 13:54:23 -07001788 // instr reg, reg, addr32_low
Alexey Frunze627c1a02017-01-30 19:28:14 -08001789 DCHECK_EQ(code[literal_offset + 4], 0x78);
1790 DCHECK_EQ(code[literal_offset + 5], 0x56);
Alexey Frunzec61c0762017-04-10 13:54:23 -07001791 addr32 += (addr32 & 0x8000) << 1; // Account for sign extension in "instr reg, reg, addr32_low".
Alexey Frunze627c1a02017-01-30 19:28:14 -08001792 // lui reg, addr32_high
1793 code[literal_offset + 0] = static_cast<uint8_t>(addr32 >> 16);
1794 code[literal_offset + 1] = static_cast<uint8_t>(addr32 >> 24);
Alexey Frunzec61c0762017-04-10 13:54:23 -07001795 // instr reg, reg, addr32_low
Alexey Frunze627c1a02017-01-30 19:28:14 -08001796 code[literal_offset + 4] = static_cast<uint8_t>(addr32 >> 0);
1797 code[literal_offset + 5] = static_cast<uint8_t>(addr32 >> 8);
1798}
1799
1800void CodeGeneratorMIPS::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1801 for (const JitPatchInfo& info : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001802 const auto it = jit_string_roots_.find(StringReference(&info.target_dex_file,
1803 dex::StringIndex(info.index)));
Alexey Frunze627c1a02017-01-30 19:28:14 -08001804 DCHECK(it != jit_string_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001805 uint64_t index_in_table = it->second;
1806 PatchJitRootUse(code, roots_data, info, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001807 }
1808 for (const JitPatchInfo& info : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001809 const auto it = jit_class_roots_.find(TypeReference(&info.target_dex_file,
1810 dex::TypeIndex(info.index)));
Alexey Frunze627c1a02017-01-30 19:28:14 -08001811 DCHECK(it != jit_class_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001812 uint64_t index_in_table = it->second;
1813 PatchJitRootUse(code, roots_data, info, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001814 }
1815}
1816
Goran Jakovljevice114da22016-12-26 14:21:43 +01001817void CodeGeneratorMIPS::MarkGCCard(Register object,
1818 Register value,
1819 bool value_can_be_null) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001820 MipsLabel done;
1821 Register card = AT;
1822 Register temp = TMP;
Goran Jakovljevice114da22016-12-26 14:21:43 +01001823 if (value_can_be_null) {
1824 __ Beqz(value, &done);
1825 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001826 __ LoadFromOffset(kLoadWord,
1827 card,
1828 TR,
Andreas Gampe542451c2016-07-26 09:02:02 -07001829 Thread::CardTableOffset<kMipsPointerSize>().Int32Value());
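// The card to mark lives at card_base + (object >> kCardShift). Storing the low byte of
// `card` itself as the value assumes the card-table base is biased so that its low byte
// equals the dirty-card marker (the usual ART card-marking trick), which avoids loading
// a separate constant.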
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001830 __ Srl(temp, object, gc::accounting::CardTable::kCardShift);
1831 __ Addu(temp, card, temp);
1832 __ Sb(card, temp, 0);
Goran Jakovljevice114da22016-12-26 14:21:43 +01001833 if (value_can_be_null) {
1834 __ Bind(&done);
1835 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001836}
1837
David Brazdil58282f42016-01-14 12:45:10 +00001838void CodeGeneratorMIPS::SetupBlockedRegisters() const {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001839 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
1840 blocked_core_registers_[ZERO] = true;
1841 blocked_core_registers_[K0] = true;
1842 blocked_core_registers_[K1] = true;
1843 blocked_core_registers_[GP] = true;
1844 blocked_core_registers_[SP] = true;
1845 blocked_core_registers_[RA] = true;
1846
1847 // AT and TMP(T8) are used as temporary/scratch registers
1848 // (similar to how AT is used by MIPS assemblers).
1849 blocked_core_registers_[AT] = true;
1850 blocked_core_registers_[TMP] = true;
1851 blocked_fpu_registers_[FTMP] = true;
1852
1853 // Reserve suspend and thread registers.
1854 blocked_core_registers_[S0] = true;
1855 blocked_core_registers_[TR] = true;
1856
1857 // Reserve T9 for function calls.
1858 blocked_core_registers_[T9] = true;
1859
1860 // Reserve odd-numbered FPU registers.
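// (On MIPS32 a double occupies an even/odd register pair, so restricting allocation to
// even-numbered registers keeps float and double values in one uniform register pool.)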
1861 for (size_t i = 1; i < kNumberOfFRegisters; i += 2) {
1862 blocked_fpu_registers_[i] = true;
1863 }
1864
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02001865 if (GetGraph()->IsDebuggable()) {
1866 // Stubs do not save callee-save floating point registers. If the graph
1867 // is debuggable, we need to deal with these registers differently. For
1868 // now, just block them.
1869 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1870 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1871 }
1872 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001873}
1874
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001875size_t CodeGeneratorMIPS::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1876 __ StoreToOffset(kStoreWord, Register(reg_id), SP, stack_index);
1877 return kMipsWordSize;
1878}
1879
1880size_t CodeGeneratorMIPS::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1881 __ LoadFromOffset(kLoadWord, Register(reg_id), SP, stack_index);
1882 return kMipsWordSize;
1883}
1884
1885size_t CodeGeneratorMIPS::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1886 __ StoreDToOffset(FRegister(reg_id), SP, stack_index);
1887 return kMipsDoublewordSize;
1888}
1889
1890size_t CodeGeneratorMIPS::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
1891 __ LoadDFromOffset(FRegister(reg_id), SP, stack_index);
1892 return kMipsDoublewordSize;
1893}
1894
1895void CodeGeneratorMIPS::DumpCoreRegister(std::ostream& stream, int reg) const {
Vladimir Marko623a7a22016-02-02 18:14:52 +00001896 stream << Register(reg);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001897}
1898
1899void CodeGeneratorMIPS::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
Vladimir Marko623a7a22016-02-02 18:14:52 +00001900 stream << FRegister(reg);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001901}
1902
Serban Constantinescufca16662016-07-14 09:21:59 +01001903constexpr size_t kMipsDirectEntrypointRuntimeOffset = 16;
1904
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001905void CodeGeneratorMIPS::InvokeRuntime(QuickEntrypointEnum entrypoint,
1906 HInstruction* instruction,
1907 uint32_t dex_pc,
1908 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001909 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Alexey Frunze15958152017-02-09 19:08:30 -08001910 GenerateInvokeRuntime(GetThreadOffset<kMipsPointerSize>(entrypoint).Int32Value(),
1911 IsDirectEntrypoint(entrypoint));
1912 if (EntrypointRequiresStackMap(entrypoint)) {
1913 RecordPcInfo(instruction, dex_pc, slow_path);
1914 }
1915}
1916
1917void CodeGeneratorMIPS::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
1918 HInstruction* instruction,
1919 SlowPathCode* slow_path,
1920 bool direct) {
1921 ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
1922 GenerateInvokeRuntime(entry_point_offset, direct);
1923}
1924
1925void CodeGeneratorMIPS::GenerateInvokeRuntime(int32_t entry_point_offset, bool direct) {
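// The entrypoint is loaded into T9 and called through it, matching the MIPS o32 PIC
// convention that expects the callee address in T9 (which is also why T9 is reserved
// in SetupBlockedRegisters).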
Alexey Frunze57eb0f52016-07-29 22:04:46 -07001926 bool reordering = __ SetReorder(false);
Alexey Frunze15958152017-02-09 19:08:30 -08001927 __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
Alexey Frunze5c7aed32015-11-25 19:41:54 -08001928 __ Jalr(T9);
Alexey Frunze15958152017-02-09 19:08:30 -08001929 if (direct) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001930 // Reserve argument space on stack (for $a0-$a3) for
1931 // entrypoints that directly reference native implementations.
1932 // The called function may use this space to store the $a0-$a3 registers.
Alexey Frunze5c7aed32015-11-25 19:41:54 -08001933 __ IncreaseFrameSize(kMipsDirectEntrypointRuntimeOffset); // Single instruction in delay slot.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001934 __ DecreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);
Alexey Frunze5c7aed32015-11-25 19:41:54 -08001935 } else {
1936 __ Nop(); // In delay slot.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001937 }
Alexey Frunze57eb0f52016-07-29 22:04:46 -07001938 __ SetReorder(reordering);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001939}
1940
1941void InstructionCodeGeneratorMIPS::GenerateClassInitializationCheck(SlowPathCodeMIPS* slow_path,
1942 Register class_reg) {
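// Class status values are ordered, so any status below kStatusInitialized is sent to
// the slow path.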
1943 __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
1944 __ LoadConst32(AT, mirror::Class::kStatusInitialized);
1945 __ Blt(TMP, AT, slow_path->GetEntryLabel());
1946 // Even if the initialized flag is set, we need to ensure consistent memory ordering.
1947 __ Sync(0);
1948 __ Bind(slow_path->GetExitLabel());
1949}
1950
1951void InstructionCodeGeneratorMIPS::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
1952 __ Sync(0); // Only stype 0 is supported.
1953}
1954
1955void InstructionCodeGeneratorMIPS::GenerateSuspendCheck(HSuspendCheck* instruction,
1956 HBasicBlock* successor) {
1957 SuspendCheckSlowPathMIPS* slow_path =
1958 new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS(instruction, successor);
1959 codegen_->AddSlowPath(slow_path);
1960
1961 __ LoadFromOffset(kLoadUnsignedHalfword,
1962 TMP,
1963 TR,
Andreas Gampe542451c2016-07-26 09:02:02 -07001964 Thread::ThreadFlagsOffset<kMipsPointerSize>().Int32Value());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001965 if (successor == nullptr) {
1966 __ Bnez(TMP, slow_path->GetEntryLabel());
1967 __ Bind(slow_path->GetReturnLabel());
1968 } else {
1969 __ Beqz(TMP, codegen_->GetLabelOf(successor));
1970 __ B(slow_path->GetEntryLabel());
1971 // slow_path will return to GetLabelOf(successor).
1972 }
1973}
1974
1975InstructionCodeGeneratorMIPS::InstructionCodeGeneratorMIPS(HGraph* graph,
1976 CodeGeneratorMIPS* codegen)
Aart Bik42249c32016-01-07 15:33:50 -08001977 : InstructionCodeGenerator(graph, codegen),
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001978 assembler_(codegen->GetAssembler()),
1979 codegen_(codegen) {}
1980
1981void LocationsBuilderMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
1982 DCHECK_EQ(instruction->InputCount(), 2U);
1983 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1984 Primitive::Type type = instruction->GetResultType();
1985 switch (type) {
1986 case Primitive::kPrimInt: {
1987 locations->SetInAt(0, Location::RequiresRegister());
1988 HInstruction* right = instruction->InputAt(1);
1989 bool can_use_imm = false;
1990 if (right->IsConstant()) {
1991 int32_t imm = CodeGenerator::GetInt32ValueOf(right->AsConstant());
1992 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
1993 can_use_imm = IsUint<16>(imm);
1994 } else if (instruction->IsAdd()) {
1995 can_use_imm = IsInt<16>(imm);
1996 } else {
1997 DCHECK(instruction->IsSub());
1998 can_use_imm = IsInt<16>(-imm);
1999 }
2000 }
2001 if (can_use_imm)
2002 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
2003 else
2004 locations->SetInAt(1, Location::RequiresRegister());
2005 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2006 break;
2007 }
2008
2009 case Primitive::kPrimLong: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002010 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002011 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2012 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002013 break;
2014 }
2015
2016 case Primitive::kPrimFloat:
2017 case Primitive::kPrimDouble:
2018 DCHECK(instruction->IsAdd() || instruction->IsSub());
2019 locations->SetInAt(0, Location::RequiresFpuRegister());
2020 locations->SetInAt(1, Location::RequiresFpuRegister());
2021 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2022 break;
2023
2024 default:
2025 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
2026 }
2027}
2028
2029void InstructionCodeGeneratorMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
2030 Primitive::Type type = instruction->GetType();
2031 LocationSummary* locations = instruction->GetLocations();
2032
2033 switch (type) {
2034 case Primitive::kPrimInt: {
2035 Register dst = locations->Out().AsRegister<Register>();
2036 Register lhs = locations->InAt(0).AsRegister<Register>();
2037 Location rhs_location = locations->InAt(1);
2038
2039 Register rhs_reg = ZERO;
2040 int32_t rhs_imm = 0;
2041 bool use_imm = rhs_location.IsConstant();
2042 if (use_imm) {
2043 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
2044 } else {
2045 rhs_reg = rhs_location.AsRegister<Register>();
2046 }
2047
2048 if (instruction->IsAnd()) {
2049 if (use_imm)
2050 __ Andi(dst, lhs, rhs_imm);
2051 else
2052 __ And(dst, lhs, rhs_reg);
2053 } else if (instruction->IsOr()) {
2054 if (use_imm)
2055 __ Ori(dst, lhs, rhs_imm);
2056 else
2057 __ Or(dst, lhs, rhs_reg);
2058 } else if (instruction->IsXor()) {
2059 if (use_imm)
2060 __ Xori(dst, lhs, rhs_imm);
2061 else
2062 __ Xor(dst, lhs, rhs_reg);
2063 } else if (instruction->IsAdd()) {
2064 if (use_imm)
2065 __ Addiu(dst, lhs, rhs_imm);
2066 else
2067 __ Addu(dst, lhs, rhs_reg);
2068 } else {
2069 DCHECK(instruction->IsSub());
2070 if (use_imm)
2071 __ Addiu(dst, lhs, -rhs_imm);
2072 else
2073 __ Subu(dst, lhs, rhs_reg);
2074 }
2075 break;
2076 }
2077
2078 case Primitive::kPrimLong: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002079 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
2080 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
2081 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
2082 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002083 Location rhs_location = locations->InAt(1);
2084 bool use_imm = rhs_location.IsConstant();
2085 if (!use_imm) {
2086 Register rhs_high = rhs_location.AsRegisterPairHigh<Register>();
2087 Register rhs_low = rhs_location.AsRegisterPairLow<Register>();
2088 if (instruction->IsAnd()) {
2089 __ And(dst_low, lhs_low, rhs_low);
2090 __ And(dst_high, lhs_high, rhs_high);
2091 } else if (instruction->IsOr()) {
2092 __ Or(dst_low, lhs_low, rhs_low);
2093 __ Or(dst_high, lhs_high, rhs_high);
2094 } else if (instruction->IsXor()) {
2095 __ Xor(dst_low, lhs_low, rhs_low);
2096 __ Xor(dst_high, lhs_high, rhs_high);
2097 } else if (instruction->IsAdd()) {
2098 if (lhs_low == rhs_low) {
2099 // Special case for lhs = rhs and the sum potentially overwriting both lhs and rhs.
2100 __ Slt(TMP, lhs_low, ZERO);
2101 __ Addu(dst_low, lhs_low, rhs_low);
2102 } else {
2103 __ Addu(dst_low, lhs_low, rhs_low);
2104 // If the sum overwrites rhs, lhs remains unchanged, otherwise rhs remains unchanged.
2105 __ Sltu(TMP, dst_low, (dst_low == rhs_low) ? lhs_low : rhs_low);
2106 }
2107 __ Addu(dst_high, lhs_high, rhs_high);
2108 __ Addu(dst_high, dst_high, TMP);
2109 } else {
2110 DCHECK(instruction->IsSub());
2111 __ Sltu(TMP, lhs_low, rhs_low);
2112 __ Subu(dst_low, lhs_low, rhs_low);
2113 __ Subu(dst_high, lhs_high, rhs_high);
2114 __ Subu(dst_high, dst_high, TMP);
2115 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002116 } else {
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002117 int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
2118 if (instruction->IsOr()) {
2119 uint32_t low = Low32Bits(value);
2120 uint32_t high = High32Bits(value);
2121 if (IsUint<16>(low)) {
2122 if (dst_low != lhs_low || low != 0) {
2123 __ Ori(dst_low, lhs_low, low);
2124 }
2125 } else {
2126 __ LoadConst32(TMP, low);
2127 __ Or(dst_low, lhs_low, TMP);
2128 }
2129 if (IsUint<16>(high)) {
2130 if (dst_high != lhs_high || high != 0) {
2131 __ Ori(dst_high, lhs_high, high);
2132 }
2133 } else {
2134 if (high != low) {
2135 __ LoadConst32(TMP, high);
2136 }
2137 __ Or(dst_high, lhs_high, TMP);
2138 }
2139 } else if (instruction->IsXor()) {
2140 uint32_t low = Low32Bits(value);
2141 uint32_t high = High32Bits(value);
2142 if (IsUint<16>(low)) {
2143 if (dst_low != lhs_low || low != 0) {
2144 __ Xori(dst_low, lhs_low, low);
2145 }
2146 } else {
2147 __ LoadConst32(TMP, low);
2148 __ Xor(dst_low, lhs_low, TMP);
2149 }
2150 if (IsUint<16>(high)) {
2151 if (dst_high != lhs_high || high != 0) {
2152 __ Xori(dst_high, lhs_high, high);
2153 }
2154 } else {
2155 if (high != low) {
2156 __ LoadConst32(TMP, high);
2157 }
2158 __ Xor(dst_high, lhs_high, TMP);
2159 }
2160 } else if (instruction->IsAnd()) {
2161 uint32_t low = Low32Bits(value);
2162 uint32_t high = High32Bits(value);
2163 if (IsUint<16>(low)) {
2164 __ Andi(dst_low, lhs_low, low);
2165 } else if (low != 0xFFFFFFFF) {
2166 __ LoadConst32(TMP, low);
2167 __ And(dst_low, lhs_low, TMP);
2168 } else if (dst_low != lhs_low) {
2169 __ Move(dst_low, lhs_low);
2170 }
2171 if (IsUint<16>(high)) {
2172 __ Andi(dst_high, lhs_high, high);
2173 } else if (high != 0xFFFFFFFF) {
2174 if (high != low) {
2175 __ LoadConst32(TMP, high);
2176 }
2177 __ And(dst_high, lhs_high, TMP);
2178 } else if (dst_high != lhs_high) {
2179 __ Move(dst_high, lhs_high);
2180 }
2181 } else {
2182 if (instruction->IsSub()) {
2183 value = -value;
2184 } else {
2185 DCHECK(instruction->IsAdd());
2186 }
2187 int32_t low = Low32Bits(value);
2188 int32_t high = High32Bits(value);
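// For HSub the constant was negated above, so both cases reduce to an addition: add
// the low words, capture the carry in AT (set when the unsigned sum is below the
// addend), and fold it into the high word afterwards.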
2189 if (IsInt<16>(low)) {
2190 if (dst_low != lhs_low || low != 0) {
2191 __ Addiu(dst_low, lhs_low, low);
2192 }
2193 if (low != 0) {
2194 __ Sltiu(AT, dst_low, low);
2195 }
2196 } else {
2197 __ LoadConst32(TMP, low);
2198 __ Addu(dst_low, lhs_low, TMP);
2199 __ Sltu(AT, dst_low, TMP);
2200 }
2201 if (IsInt<16>(high)) {
2202 if (dst_high != lhs_high || high != 0) {
2203 __ Addiu(dst_high, lhs_high, high);
2204 }
2205 } else {
2206 if (high != low) {
2207 __ LoadConst32(TMP, high);
2208 }
2209 __ Addu(dst_high, lhs_high, TMP);
2210 }
2211 if (low != 0) {
2212 __ Addu(dst_high, dst_high, AT);
2213 }
2214 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002215 }
2216 break;
2217 }
2218
2219 case Primitive::kPrimFloat:
2220 case Primitive::kPrimDouble: {
2221 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
2222 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
2223 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
2224 if (instruction->IsAdd()) {
2225 if (type == Primitive::kPrimFloat) {
2226 __ AddS(dst, lhs, rhs);
2227 } else {
2228 __ AddD(dst, lhs, rhs);
2229 }
2230 } else {
2231 DCHECK(instruction->IsSub());
2232 if (type == Primitive::kPrimFloat) {
2233 __ SubS(dst, lhs, rhs);
2234 } else {
2235 __ SubD(dst, lhs, rhs);
2236 }
2237 }
2238 break;
2239 }
2240
2241 default:
2242 LOG(FATAL) << "Unexpected binary operation type " << type;
2243 }
2244}
2245
2246void LocationsBuilderMIPS::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002247 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002248
2249 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2250 Primitive::Type type = instr->GetResultType();
2251 switch (type) {
2252 case Primitive::kPrimInt:
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002253 locations->SetInAt(0, Location::RequiresRegister());
2254 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2255 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2256 break;
2257 case Primitive::kPrimLong:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002258 locations->SetInAt(0, Location::RequiresRegister());
2259 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2260 locations->SetOut(Location::RequiresRegister());
2261 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002262 default:
2263 LOG(FATAL) << "Unexpected shift type " << type;
2264 }
2265}
2266
2267static constexpr size_t kMipsBitsPerWord = kMipsWordSize * kBitsPerByte;
2268
2269void InstructionCodeGeneratorMIPS::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002270 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002271 LocationSummary* locations = instr->GetLocations();
2272 Primitive::Type type = instr->GetType();
2273
2274 Location rhs_location = locations->InAt(1);
2275 bool use_imm = rhs_location.IsConstant();
2276 Register rhs_reg = use_imm ? ZERO : rhs_location.AsRegister<Register>();
2277 int64_t rhs_imm = use_imm ? CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()) : 0;
Roland Levillain5b5b9312016-03-22 14:57:31 +00002278 const uint32_t shift_mask =
2279 (type == Primitive::kPrimInt) ? kMaxIntShiftDistance : kMaxLongShiftDistance;
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002280 const uint32_t shift_value = rhs_imm & shift_mask;
Alexey Frunze92d90602015-12-18 18:16:36 -08002281 // Are the INS (Insert Bit Field) and ROTR instructions supported?
2282 bool has_ins_rotr = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002283
2284 switch (type) {
2285 case Primitive::kPrimInt: {
2286 Register dst = locations->Out().AsRegister<Register>();
2287 Register lhs = locations->InAt(0).AsRegister<Register>();
2288 if (use_imm) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002289 if (shift_value == 0) {
2290 if (dst != lhs) {
2291 __ Move(dst, lhs);
2292 }
2293 } else if (instr->IsShl()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002294 __ Sll(dst, lhs, shift_value);
2295 } else if (instr->IsShr()) {
2296 __ Sra(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08002297 } else if (instr->IsUShr()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002298 __ Srl(dst, lhs, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08002299 } else {
2300 if (has_ins_rotr) {
2301 __ Rotr(dst, lhs, shift_value);
2302 } else {
2303 __ Sll(TMP, lhs, (kMipsBitsPerWord - shift_value) & shift_mask);
2304 __ Srl(dst, lhs, shift_value);
2305 __ Or(dst, dst, TMP);
2306 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002307 }
2308 } else {
2309 if (instr->IsShl()) {
2310 __ Sllv(dst, lhs, rhs_reg);
2311 } else if (instr->IsShr()) {
2312 __ Srav(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08002313 } else if (instr->IsUShr()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002314 __ Srlv(dst, lhs, rhs_reg);
Alexey Frunze92d90602015-12-18 18:16:36 -08002315 } else {
2316 if (has_ins_rotr) {
2317 __ Rotrv(dst, lhs, rhs_reg);
2318 } else {
2319 __ Subu(TMP, ZERO, rhs_reg);
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002320 // 32-bit shift instructions use the 5 least significant bits of the shift count, so
2321 // shifting by `-rhs_reg` is equivalent to shifting by `(32 - rhs_reg) & 31`. The case
2322 // when `rhs_reg & 31 == 0` is OK even though we don't shift `lhs` left all the way out
2323 // by 32, because the result in this case is computed as `(lhs >> 0) | (lhs << 0)`,
2324 // in other words, the OR'd values are equal.
Alexey Frunze92d90602015-12-18 18:16:36 -08002325 __ Sllv(TMP, lhs, TMP);
2326 __ Srlv(dst, lhs, rhs_reg);
2327 __ Or(dst, dst, TMP);
2328 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002329 }
2330 }
2331 break;
2332 }
2333
2334 case Primitive::kPrimLong: {
2335 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
2336 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
2337 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
2338 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
2339 if (use_imm) {
2340 if (shift_value == 0) {
2341 codegen_->Move64(locations->Out(), locations->InAt(0));
2342 } else if (shift_value < kMipsBitsPerWord) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002343 if (has_ins_rotr) {
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002344 if (instr->IsShl()) {
2345 __ Srl(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
2346 __ Ins(dst_high, lhs_high, shift_value, kMipsBitsPerWord - shift_value);
2347 __ Sll(dst_low, lhs_low, shift_value);
2348 } else if (instr->IsShr()) {
2349 __ Srl(dst_low, lhs_low, shift_value);
2350 __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
2351 __ Sra(dst_high, lhs_high, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08002352 } else if (instr->IsUShr()) {
2353 __ Srl(dst_low, lhs_low, shift_value);
2354 __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
2355 __ Srl(dst_high, lhs_high, shift_value);
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002356 } else {
2357 __ Srl(dst_low, lhs_low, shift_value);
2358 __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
2359 __ Srl(dst_high, lhs_high, shift_value);
Alexey Frunze92d90602015-12-18 18:16:36 -08002360 __ Ins(dst_high, lhs_low, kMipsBitsPerWord - shift_value, shift_value);
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002361 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002362 } else {
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002363 if (instr->IsShl()) {
2364 __ Sll(dst_low, lhs_low, shift_value);
2365 __ Srl(TMP, lhs_low, kMipsBitsPerWord - shift_value);
2366 __ Sll(dst_high, lhs_high, shift_value);
2367 __ Or(dst_high, dst_high, TMP);
2368 } else if (instr->IsShr()) {
2369 __ Sra(dst_high, lhs_high, shift_value);
2370 __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
2371 __ Srl(dst_low, lhs_low, shift_value);
2372 __ Or(dst_low, dst_low, TMP);
Alexey Frunze92d90602015-12-18 18:16:36 -08002373 } else if (instr->IsUShr()) {
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002374 __ Srl(dst_high, lhs_high, shift_value);
2375 __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
2376 __ Srl(dst_low, lhs_low, shift_value);
2377 __ Or(dst_low, dst_low, TMP);
Alexey Frunze92d90602015-12-18 18:16:36 -08002378 } else {
2379 __ Srl(TMP, lhs_low, shift_value);
2380 __ Sll(dst_low, lhs_high, kMipsBitsPerWord - shift_value);
2381 __ Or(dst_low, dst_low, TMP);
2382 __ Srl(TMP, lhs_high, shift_value);
2383 __ Sll(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
2384 __ Or(dst_high, dst_high, TMP);
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002385 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002386 }
2387 } else {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002388 const uint32_t shift_value_high = shift_value - kMipsBitsPerWord;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002389 if (instr->IsShl()) {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002390 __ Sll(dst_high, lhs_low, shift_value_high);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002391 __ Move(dst_low, ZERO);
2392 } else if (instr->IsShr()) {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002393 __ Sra(dst_low, lhs_high, shift_value_high);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002394 __ Sra(dst_high, dst_low, kMipsBitsPerWord - 1);
Alexey Frunze92d90602015-12-18 18:16:36 -08002395 } else if (instr->IsUShr()) {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002396 __ Srl(dst_low, lhs_high, shift_value_high);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002397 __ Move(dst_high, ZERO);
Alexey Frunze92d90602015-12-18 18:16:36 -08002398 } else {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002399 if (shift_value == kMipsBitsPerWord) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002400 // 64-bit rotation by 32 is just a swap.
2401 __ Move(dst_low, lhs_high);
2402 __ Move(dst_high, lhs_low);
2403 } else {
2404 if (has_ins_rotr) {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002405 __ Srl(dst_low, lhs_high, shift_value_high);
2406 __ Ins(dst_low, lhs_low, kMipsBitsPerWord - shift_value_high, shift_value_high);
2407 __ Srl(dst_high, lhs_low, shift_value_high);
2408 __ Ins(dst_high, lhs_high, kMipsBitsPerWord - shift_value_high, shift_value_high);
Alexey Frunze92d90602015-12-18 18:16:36 -08002409 } else {
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002410 __ Sll(TMP, lhs_low, kMipsBitsPerWord - shift_value_high);
2411 __ Srl(dst_low, lhs_high, shift_value_high);
Alexey Frunze92d90602015-12-18 18:16:36 -08002412 __ Or(dst_low, dst_low, TMP);
Alexey Frunze0d9150b2016-01-13 16:24:25 -08002413 __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value_high);
2414 __ Srl(dst_high, lhs_low, shift_value_high);
Alexey Frunze92d90602015-12-18 18:16:36 -08002415 __ Or(dst_high, dst_high, TMP);
2416 }
2417 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002418 }
2419 }
2420 } else {
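// The shift distance is in a register. MIPS32 shifts only use the low five
// bits of the distance, so the result is first formed as if the distance
// were below 32 (combining bits of both input words), and then, if bit 5 of
// the distance is set (distance >= 32), the two result words are fixed up:
// left shifts move low to high, right shifts move high to low with the
// vacated word filled by zero or the sign, and a rotation swaps the words.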
2421 MipsLabel done;
2422 if (instr->IsShl()) {
2423 __ Sllv(dst_low, lhs_low, rhs_reg);
2424 __ Nor(AT, ZERO, rhs_reg);
2425 __ Srl(TMP, lhs_low, 1);
2426 __ Srlv(TMP, TMP, AT);
2427 __ Sllv(dst_high, lhs_high, rhs_reg);
2428 __ Or(dst_high, dst_high, TMP);
2429 __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
2430 __ Beqz(TMP, &done);
2431 __ Move(dst_high, dst_low);
2432 __ Move(dst_low, ZERO);
2433 } else if (instr->IsShr()) {
2434 __ Srav(dst_high, lhs_high, rhs_reg);
2435 __ Nor(AT, ZERO, rhs_reg);
2436 __ Sll(TMP, lhs_high, 1);
2437 __ Sllv(TMP, TMP, AT);
2438 __ Srlv(dst_low, lhs_low, rhs_reg);
2439 __ Or(dst_low, dst_low, TMP);
2440 __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
2441 __ Beqz(TMP, &done);
2442 __ Move(dst_low, dst_high);
2443 __ Sra(dst_high, dst_high, 31);
Alexey Frunze92d90602015-12-18 18:16:36 -08002444 } else if (instr->IsUShr()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002445 __ Srlv(dst_high, lhs_high, rhs_reg);
2446 __ Nor(AT, ZERO, rhs_reg);
2447 __ Sll(TMP, lhs_high, 1);
2448 __ Sllv(TMP, TMP, AT);
2449 __ Srlv(dst_low, lhs_low, rhs_reg);
2450 __ Or(dst_low, dst_low, TMP);
2451 __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
2452 __ Beqz(TMP, &done);
2453 __ Move(dst_low, dst_high);
2454 __ Move(dst_high, ZERO);
Alexey Frunze92d90602015-12-18 18:16:36 -08002455 } else {
2456 __ Nor(AT, ZERO, rhs_reg);
2457 __ Srlv(TMP, lhs_low, rhs_reg);
2458 __ Sll(dst_low, lhs_high, 1);
2459 __ Sllv(dst_low, dst_low, AT);
2460 __ Or(dst_low, dst_low, TMP);
2461 __ Srlv(TMP, lhs_high, rhs_reg);
2462 __ Sll(dst_high, lhs_low, 1);
2463 __ Sllv(dst_high, dst_high, AT);
2464 __ Or(dst_high, dst_high, TMP);
2465 __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
2466 __ Beqz(TMP, &done);
2467 __ Move(TMP, dst_high);
2468 __ Move(dst_high, dst_low);
2469 __ Move(dst_low, TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002470 }
2471 __ Bind(&done);
2472 }
2473 break;
2474 }
2475
2476 default:
2477 LOG(FATAL) << "Unexpected shift operation type " << type;
2478 }
2479}
2480
2481void LocationsBuilderMIPS::VisitAdd(HAdd* instruction) {
2482 HandleBinaryOp(instruction);
2483}
2484
2485void InstructionCodeGeneratorMIPS::VisitAdd(HAdd* instruction) {
2486 HandleBinaryOp(instruction);
2487}
2488
2489void LocationsBuilderMIPS::VisitAnd(HAnd* instruction) {
2490 HandleBinaryOp(instruction);
2491}
2492
2493void InstructionCodeGeneratorMIPS::VisitAnd(HAnd* instruction) {
2494 HandleBinaryOp(instruction);
2495}
2496
2497void LocationsBuilderMIPS::VisitArrayGet(HArrayGet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002498 Primitive::Type type = instruction->GetType();
2499 bool object_array_get_with_read_barrier =
2500 kEmitCompilerReadBarrier && (type == Primitive::kPrimNot);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002501 LocationSummary* locations =
Alexey Frunze15958152017-02-09 19:08:30 -08002502 new (GetGraph()->GetArena()) LocationSummary(instruction,
2503 object_array_get_with_read_barrier
2504 ? LocationSummary::kCallOnSlowPath
2505 : LocationSummary::kNoCall);
Alexey Frunzec61c0762017-04-10 13:54:23 -07002506 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
2507 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
2508 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002509 locations->SetInAt(0, Location::RequiresRegister());
2510 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
Alexey Frunze15958152017-02-09 19:08:30 -08002511 if (Primitive::IsFloatingPointType(type)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002512 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2513 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002514 // The output overlaps in the case of an object array get with
2515 // read barriers enabled: we do not want the move to overwrite the
2516 // array's location, as we need it to emit the read barrier.
2517 locations->SetOut(Location::RequiresRegister(),
2518 object_array_get_with_read_barrier
2519 ? Location::kOutputOverlap
2520 : Location::kNoOutputOverlap);
2521 }
2522 // We need a temporary register for the read barrier marking slow
2523 // path in CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier.
2524 if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
2525 locations->AddTemp(Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002526 }
2527}
2528
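// Returns a callback that records an implicit null check for `instruction`.
// The Load/Store helpers below take it as their null_checker argument and
// invoke it when the memory access is emitted, so the recorded PC corresponds
// to the instruction that can actually fault.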
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002529static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS* codegen) {
2530 auto null_checker = [codegen, instruction]() {
2531 codegen->MaybeRecordImplicitNullCheck(instruction);
Alexey Frunze2923db72016-08-20 01:55:47 -07002532 };
2533 return null_checker;
2534}
2535
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002536void InstructionCodeGeneratorMIPS::VisitArrayGet(HArrayGet* instruction) {
2537 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002538 Location obj_loc = locations->InAt(0);
2539 Register obj = obj_loc.AsRegister<Register>();
2540 Location out_loc = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002541 Location index = locations->InAt(1);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002542 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002543 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002544
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002545 Primitive::Type type = instruction->GetType();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002546 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2547 instruction->IsStringCharAt();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002548 switch (type) {
2549 case Primitive::kPrimBoolean: {
Alexey Frunze15958152017-02-09 19:08:30 -08002550 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002551 if (index.IsConstant()) {
2552 size_t offset =
2553 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002554 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002555 } else {
2556 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002557 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002558 }
2559 break;
2560 }
2561
2562 case Primitive::kPrimByte: {
Alexey Frunze15958152017-02-09 19:08:30 -08002563 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002564 if (index.IsConstant()) {
2565 size_t offset =
2566 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002567 __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002568 } else {
2569 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002570 __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002571 }
2572 break;
2573 }
2574
2575 case Primitive::kPrimShort: {
Alexey Frunze15958152017-02-09 19:08:30 -08002576 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002577 if (index.IsConstant()) {
2578 size_t offset =
2579 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002580 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002581 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002582 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_2, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002583 __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002584 }
2585 break;
2586 }
2587
2588 case Primitive::kPrimChar: {
Alexey Frunze15958152017-02-09 19:08:30 -08002589 Register out = out_loc.AsRegister<Register>();
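// When this is a String.charAt() and string compression is enabled, the
// String's count field packs (length << 1) | flag, where flag 0 means
// compressed (one byte per character) and 1 means uncompressed (two bytes
// per character). The flag is shifted into the sign bit of TMP so a single
// Bnez below selects the load width.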
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002590 if (maybe_compressed_char_at) {
2591 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2592 __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
2593 __ Sll(TMP, TMP, 31); // Extract compression flag into the most significant bit of TMP.
2594 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2595 "Expecting 0=compressed, 1=uncompressed");
2596 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002597 if (index.IsConstant()) {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002598 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
2599 if (maybe_compressed_char_at) {
2600 MipsLabel uncompressed_load, done;
2601 __ Bnez(TMP, &uncompressed_load);
2602 __ LoadFromOffset(kLoadUnsignedByte,
2603 out,
2604 obj,
2605 data_offset + (const_index << TIMES_1));
2606 __ B(&done);
2607 __ Bind(&uncompressed_load);
2608 __ LoadFromOffset(kLoadUnsignedHalfword,
2609 out,
2610 obj,
2611 data_offset + (const_index << TIMES_2));
2612 __ Bind(&done);
2613 } else {
2614 __ LoadFromOffset(kLoadUnsignedHalfword,
2615 out,
2616 obj,
2617 data_offset + (const_index << TIMES_2),
2618 null_checker);
2619 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002620 } else {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002621 Register index_reg = index.AsRegister<Register>();
2622 if (maybe_compressed_char_at) {
2623 MipsLabel uncompressed_load, done;
2624 __ Bnez(TMP, &uncompressed_load);
2625 __ Addu(TMP, obj, index_reg);
2626 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
2627 __ B(&done);
2628 __ Bind(&uncompressed_load);
Chris Larsencd0295d2017-03-31 15:26:54 -07002629 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002630 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
2631 __ Bind(&done);
2632 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002633 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002634 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
2635 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002636 }
2637 break;
2638 }
2639
Alexey Frunze15958152017-02-09 19:08:30 -08002640 case Primitive::kPrimInt: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002641 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze15958152017-02-09 19:08:30 -08002642 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002643 if (index.IsConstant()) {
2644 size_t offset =
2645 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002646 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002647 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002648 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002649 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002650 }
2651 break;
2652 }
2653
Alexey Frunze15958152017-02-09 19:08:30 -08002654 case Primitive::kPrimNot: {
2655 static_assert(
2656 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2657 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2658 // /* HeapReference<Object> */ out =
2659 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
2660 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2661 Location temp = locations->GetTemp(0);
2662 // Note that a potential implicit null check is handled in this
2663 // CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier call.
2664 codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
2665 out_loc,
2666 obj,
2667 data_offset,
2668 index,
2669 temp,
2670 /* needs_null_check */ true);
2671 } else {
2672 Register out = out_loc.AsRegister<Register>();
2673 if (index.IsConstant()) {
2674 size_t offset =
2675 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2676 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
2677 // If read barriers are enabled, emit read barriers other than
2678 // Baker's using a slow path (and also unpoison the loaded
2679 // reference, if heap poisoning is enabled).
2680 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
2681 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002682 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze15958152017-02-09 19:08:30 -08002683 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
2684 // If read barriers are enabled, emit read barriers other than
2685 // Baker's using a slow path (and also unpoison the loaded
2686 // reference, if heap poisoning is enabled).
2687 codegen_->MaybeGenerateReadBarrierSlow(instruction,
2688 out_loc,
2689 out_loc,
2690 obj_loc,
2691 data_offset,
2692 index);
2693 }
2694 }
2695 break;
2696 }
2697
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002698 case Primitive::kPrimLong: {
Alexey Frunze15958152017-02-09 19:08:30 -08002699 Register out = out_loc.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002700 if (index.IsConstant()) {
2701 size_t offset =
2702 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002703 __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002704 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002705 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002706 __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002707 }
2708 break;
2709 }
2710
2711 case Primitive::kPrimFloat: {
Alexey Frunze15958152017-02-09 19:08:30 -08002712 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002713 if (index.IsConstant()) {
2714 size_t offset =
2715 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002716 __ LoadSFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002717 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002718 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002719 __ LoadSFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002720 }
2721 break;
2722 }
2723
2724 case Primitive::kPrimDouble: {
Alexey Frunze15958152017-02-09 19:08:30 -08002725 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002726 if (index.IsConstant()) {
2727 size_t offset =
2728 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002729 __ LoadDFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002730 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002731 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002732 __ LoadDFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002733 }
2734 break;
2735 }
2736
2737 case Primitive::kPrimVoid:
2738 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2739 UNREACHABLE();
2740 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002741}
2742
2743void LocationsBuilderMIPS::VisitArrayLength(HArrayLength* instruction) {
2744 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2745 locations->SetInAt(0, Location::RequiresRegister());
2746 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2747}
2748
2749void InstructionCodeGeneratorMIPS::VisitArrayLength(HArrayLength* instruction) {
2750 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01002751 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002752 Register obj = locations->InAt(0).AsRegister<Register>();
2753 Register out = locations->Out().AsRegister<Register>();
2754 __ LoadFromOffset(kLoadWord, out, obj, offset);
2755 codegen_->MaybeRecordImplicitNullCheck(instruction);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002756 // Mask out compression flag from String's array length.
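// (The count field stores (length << 1) | compression flag, so the logical
// shift right by one below recovers the character count.)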
2757 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
2758 __ Srl(out, out, 1u);
2759 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002760}
2761
Alexey Frunzef58b2482016-09-02 22:14:06 -07002762Location LocationsBuilderMIPS::RegisterOrZeroConstant(HInstruction* instruction) {
2763 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2764 ? Location::ConstantLocation(instruction->AsConstant())
2765 : Location::RequiresRegister();
2766}
2767
2768Location LocationsBuilderMIPS::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2769 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2770 // We can store a non-zero float or double constant without first loading it into the FPU,
2771 // but we should only prefer this if the constant has a single use.
2772 if (instruction->IsConstant() &&
2773 (instruction->AsConstant()->IsZeroBitPattern() ||
2774 instruction->GetUses().HasExactlyOneElement())) {
2775 return Location::ConstantLocation(instruction->AsConstant());
2776 // Otherwise fall through and require an FPU register for the constant.
2777 }
2778 return Location::RequiresFpuRegister();
2779}
2780
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002781void LocationsBuilderMIPS::VisitArraySet(HArraySet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002782 Primitive::Type value_type = instruction->GetComponentType();
2783
2784 bool needs_write_barrier =
2785 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2786 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2787
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002788 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2789 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002790 may_need_runtime_call_for_type_check ?
2791 LocationSummary::kCallOnSlowPath :
2792 LocationSummary::kNoCall);
2793
2794 locations->SetInAt(0, Location::RequiresRegister());
2795 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2796 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
2797 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002798 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002799 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2800 }
2801 if (needs_write_barrier) {
2802 // Temporary register for the write barrier.
2803 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002804 }
2805}
2806
2807void InstructionCodeGeneratorMIPS::VisitArraySet(HArraySet* instruction) {
2808 LocationSummary* locations = instruction->GetLocations();
2809 Register obj = locations->InAt(0).AsRegister<Register>();
2810 Location index = locations->InAt(1);
Alexey Frunzef58b2482016-09-02 22:14:06 -07002811 Location value_location = locations->InAt(2);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002812 Primitive::Type value_type = instruction->GetComponentType();
Alexey Frunze15958152017-02-09 19:08:30 -08002813 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002814 bool needs_write_barrier =
2815 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002816 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Alexey Frunzef58b2482016-09-02 22:14:06 -07002817 Register base_reg = index.IsConstant() ? obj : TMP;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002818
2819 switch (value_type) {
2820 case Primitive::kPrimBoolean:
2821 case Primitive::kPrimByte: {
2822 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002823 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002824 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002825 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002826 __ Addu(base_reg, obj, index.AsRegister<Register>());
2827 }
2828 if (value_location.IsConstant()) {
2829 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2830 __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
2831 } else {
2832 Register value = value_location.AsRegister<Register>();
2833 __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002834 }
2835 break;
2836 }
2837
2838 case Primitive::kPrimShort:
2839 case Primitive::kPrimChar: {
2840 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002841 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002842 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002843 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002844 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_2, base_reg);
Alexey Frunzef58b2482016-09-02 22:14:06 -07002845 }
2846 if (value_location.IsConstant()) {
2847 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2848 __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
2849 } else {
2850 Register value = value_location.AsRegister<Register>();
2851 __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002852 }
2853 break;
2854 }
2855
Alexey Frunze15958152017-02-09 19:08:30 -08002856 case Primitive::kPrimInt: {
2857 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2858 if (index.IsConstant()) {
2859 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
2860 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002861 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Alexey Frunze15958152017-02-09 19:08:30 -08002862 }
2863 if (value_location.IsConstant()) {
2864 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2865 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2866 } else {
2867 Register value = value_location.AsRegister<Register>();
2868 __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
2869 }
2870 break;
2871 }
2872
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002873 case Primitive::kPrimNot: {
Alexey Frunze15958152017-02-09 19:08:30 -08002874 if (value_location.IsConstant()) {
2875 // Just setting null.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002876 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002877 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002878 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002879 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002880 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002881 }
Alexey Frunze15958152017-02-09 19:08:30 -08002882 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2883 DCHECK_EQ(value, 0);
2884 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2885 DCHECK(!needs_write_barrier);
2886 DCHECK(!may_need_runtime_call_for_type_check);
2887 break;
2888 }
2889
2890 DCHECK(needs_write_barrier);
2891 Register value = value_location.AsRegister<Register>();
2892 Register temp1 = locations->GetTemp(0).AsRegister<Register>();
2893 Register temp2 = TMP; // Doesn't need to survive slow path.
2894 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2895 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2896 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2897 MipsLabel done;
2898 SlowPathCodeMIPS* slow_path = nullptr;
2899
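// If the value may not be an instance of the array's component type, an
// inline check is emitted: a null value is always accepted, a value whose
// class equals the component type is stored directly, and, when the array is
// statically Object[], a component type whose super class is null (i.e.
// java.lang.Object) also accepts anything. All other cases branch to the
// ArraySet slow path, which calls the runtime.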
2900 if (may_need_runtime_call_for_type_check) {
2901 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS(instruction);
2902 codegen_->AddSlowPath(slow_path);
2903 if (instruction->GetValueCanBeNull()) {
2904 MipsLabel non_zero;
2905 __ Bnez(value, &non_zero);
2906 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2907 if (index.IsConstant()) {
2908 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Alexey Frunzec061de12017-02-14 13:27:23 -08002909 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002910 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Alexey Frunzec061de12017-02-14 13:27:23 -08002911 }
Alexey Frunze15958152017-02-09 19:08:30 -08002912 __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
2913 __ B(&done);
2914 __ Bind(&non_zero);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002915 }
Alexey Frunze15958152017-02-09 19:08:30 -08002916
2917 // Note that when read barriers are enabled, the type checks
2918 // are performed without read barriers. This is fine, even in
2919 // the case where a class object is in the from-space after
2920 // the flip, as a comparison involving such a type would not
2921 // produce a false positive; it may of course produce a false
2922 // negative, in which case we would take the ArraySet slow
2923 // path.
2924
2925 // /* HeapReference<Class> */ temp1 = obj->klass_
2926 __ LoadFromOffset(kLoadWord, temp1, obj, class_offset, null_checker);
2927 __ MaybeUnpoisonHeapReference(temp1);
2928
2929 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2930 __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
2931 // /* HeapReference<Class> */ temp2 = value->klass_
2932 __ LoadFromOffset(kLoadWord, temp2, value, class_offset);
2933 // If heap poisoning is enabled, no need to unpoison `temp1`
2934 // nor `temp2`, as we are comparing two poisoned references.
2935
2936 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2937 MipsLabel do_put;
2938 __ Beq(temp1, temp2, &do_put);
2939 // If heap poisoning is enabled, the `temp1` reference has
2940 // not been unpoisoned yet; unpoison it now.
2941 __ MaybeUnpoisonHeapReference(temp1);
2942
2943 // /* HeapReference<Class> */ temp1 = temp1->super_class_
2944 __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
2945 // If heap poisoning is enabled, no need to unpoison
2946 // `temp1`, as we are comparing against null below.
2947 __ Bnez(temp1, slow_path->GetEntryLabel());
2948 __ Bind(&do_put);
2949 } else {
2950 __ Bne(temp1, temp2, slow_path->GetEntryLabel());
2951 }
2952 }
2953
2954 Register source = value;
2955 if (kPoisonHeapReferences) {
2956 // Note that in the case where `value` is a null reference,
2957 // we do not enter this block, as a null reference does not
2958 // need poisoning.
2959 __ Move(temp1, value);
2960 __ PoisonHeapReference(temp1);
2961 source = temp1;
2962 }
2963
2964 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2965 if (index.IsConstant()) {
2966 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002967 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002968 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Alexey Frunze15958152017-02-09 19:08:30 -08002969 }
2970 __ StoreToOffset(kStoreWord, source, base_reg, data_offset);
2971
2972 if (!may_need_runtime_call_for_type_check) {
2973 codegen_->MaybeRecordImplicitNullCheck(instruction);
2974 }
2975
2976 codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());
2977
2978 if (done.IsLinked()) {
2979 __ Bind(&done);
2980 }
2981
2982 if (slow_path != nullptr) {
2983 __ Bind(slow_path->GetExitLabel());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002984 }
2985 break;
2986 }
2987
2988 case Primitive::kPrimLong: {
2989 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002990 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002991 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002992 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002993 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
Alexey Frunzef58b2482016-09-02 22:14:06 -07002994 }
2995 if (value_location.IsConstant()) {
2996 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
2997 __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
2998 } else {
2999 Register value = value_location.AsRegisterPairLow<Register>();
3000 __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003001 }
3002 break;
3003 }
3004
3005 case Primitive::kPrimFloat: {
3006 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003007 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07003008 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003009 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07003010 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Alexey Frunzef58b2482016-09-02 22:14:06 -07003011 }
3012 if (value_location.IsConstant()) {
3013 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
3014 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
3015 } else {
3016 FRegister value = value_location.AsFpuRegister<FRegister>();
3017 __ StoreSToOffset(value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003018 }
3019 break;
3020 }
3021
3022 case Primitive::kPrimDouble: {
3023 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003024 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07003025 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003026 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07003027 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
Alexey Frunzef58b2482016-09-02 22:14:06 -07003028 }
3029 if (value_location.IsConstant()) {
3030 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
3031 __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
3032 } else {
3033 FRegister value = value_location.AsFpuRegister<FRegister>();
3034 __ StoreDToOffset(value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003035 }
3036 break;
3037 }
3038
3039 case Primitive::kPrimVoid:
3040 LOG(FATAL) << "Unreachable type " << instruction->GetType();
3041 UNREACHABLE();
3042 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003043}
3044
3045void LocationsBuilderMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003046 RegisterSet caller_saves = RegisterSet::Empty();
3047 InvokeRuntimeCallingConvention calling_convention;
3048 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3049 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3050 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003051 locations->SetInAt(0, Location::RequiresRegister());
3052 locations->SetInAt(1, Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003053}
3054
3055void InstructionCodeGeneratorMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
3056 LocationSummary* locations = instruction->GetLocations();
3057 BoundsCheckSlowPathMIPS* slow_path =
3058 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS(instruction);
3059 codegen_->AddSlowPath(slow_path);
3060
3061 Register index = locations->InAt(0).AsRegister<Register>();
3062 Register length = locations->InAt(1).AsRegister<Register>();
3063
3064 // length is limited by the maximum positive signed 32-bit integer.
3065 // Unsigned comparison of length and index checks for index < 0
3066 // and for length <= index simultaneously.
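// For example, an index of -1 compares as 0xFFFFFFFF unsigned, which is never
// below a valid length, so it falls into the slow path as well.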
3067 __ Bgeu(index, length, slow_path->GetEntryLabel());
3068}
3069
Alexey Frunze15958152017-02-09 19:08:30 -08003070// Temp is used for read barrier.
3071static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3072 if (kEmitCompilerReadBarrier &&
3073 (kUseBakerReadBarrier ||
3074 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3075 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3076 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3077 return 1;
3078 }
3079 return 0;
3080}
3081
3082// Extra temp is used for read barrier.
3083static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3084 return 1 + NumberOfInstanceOfTemps(type_check_kind);
3085}
3086
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003087void LocationsBuilderMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003088 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3089 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3090
3091 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3092 switch (type_check_kind) {
3093 case TypeCheckKind::kExactCheck:
3094 case TypeCheckKind::kAbstractClassCheck:
3095 case TypeCheckKind::kClassHierarchyCheck:
3096 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08003097 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003098 ? LocationSummary::kCallOnSlowPath
3099 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
3100 break;
3101 case TypeCheckKind::kArrayCheck:
3102 case TypeCheckKind::kUnresolvedCheck:
3103 case TypeCheckKind::kInterfaceCheck:
3104 call_kind = LocationSummary::kCallOnSlowPath;
3105 break;
3106 }
3107
3108 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003109 locations->SetInAt(0, Location::RequiresRegister());
3110 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08003111 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003112}
3113
3114void InstructionCodeGeneratorMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003115 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003116 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08003117 Location obj_loc = locations->InAt(0);
3118 Register obj = obj_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003119 Register cls = locations->InAt(1).AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08003120 Location temp_loc = locations->GetTemp(0);
3121 Register temp = temp_loc.AsRegister<Register>();
3122 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
3123 DCHECK_LE(num_temps, 2u);
3124 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003125 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3126 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3127 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3128 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
3129 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
3130 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
3131 const uint32_t object_array_data_offset =
3132 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
3133 MipsLabel done;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003134
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003135 // Always false when read barriers are enabled: the checks below avoid read barriers (for
3136 // performance and code size), so they may produce false negatives, which must be handled by a
3137 // non-fatal call to the runtime entrypoint on the slow path.
3138 bool is_type_check_slow_path_fatal = false;
3139 if (!kEmitCompilerReadBarrier) {
3140 is_type_check_slow_path_fatal =
3141 (type_check_kind == TypeCheckKind::kExactCheck ||
3142 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3143 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3144 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3145 !instruction->CanThrowIntoCatchBlock();
3146 }
3147 SlowPathCodeMIPS* slow_path =
3148 new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
3149 is_type_check_slow_path_fatal);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003150 codegen_->AddSlowPath(slow_path);
3151
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003152 // Avoid this check if we know `obj` is not null.
3153 if (instruction->MustDoNullCheck()) {
3154 __ Beqz(obj, &done);
3155 }
3156
3157 switch (type_check_kind) {
3158 case TypeCheckKind::kExactCheck:
3159 case TypeCheckKind::kArrayCheck: {
3160 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003161 GenerateReferenceLoadTwoRegisters(instruction,
3162 temp_loc,
3163 obj_loc,
3164 class_offset,
3165 maybe_temp2_loc,
3166 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003167 // Jump to slow path for throwing the exception or doing a
3168 // more involved array check.
3169 __ Bne(temp, cls, slow_path->GetEntryLabel());
3170 break;
3171 }
3172
3173 case TypeCheckKind::kAbstractClassCheck: {
3174 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003175 GenerateReferenceLoadTwoRegisters(instruction,
3176 temp_loc,
3177 obj_loc,
3178 class_offset,
3179 maybe_temp2_loc,
3180 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003181 // If the class is abstract, we eagerly fetch the super class of the
3182 // object to avoid doing a comparison we know will fail.
3183 MipsLabel loop;
3184 __ Bind(&loop);
3185 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003186 GenerateReferenceLoadOneRegister(instruction,
3187 temp_loc,
3188 super_offset,
3189 maybe_temp2_loc,
3190 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003191 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3192 // exception.
3193 __ Beqz(temp, slow_path->GetEntryLabel());
3194 // Otherwise, compare the classes.
3195 __ Bne(temp, cls, &loop);
3196 break;
3197 }
3198
3199 case TypeCheckKind::kClassHierarchyCheck: {
3200 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003201 GenerateReferenceLoadTwoRegisters(instruction,
3202 temp_loc,
3203 obj_loc,
3204 class_offset,
3205 maybe_temp2_loc,
3206 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003207 // Walk over the class hierarchy to find a match.
3208 MipsLabel loop;
3209 __ Bind(&loop);
3210 __ Beq(temp, cls, &done);
3211 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003212 GenerateReferenceLoadOneRegister(instruction,
3213 temp_loc,
3214 super_offset,
3215 maybe_temp2_loc,
3216 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003217 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3218 // exception. Otherwise, jump to the beginning of the loop.
3219 __ Bnez(temp, &loop);
3220 __ B(slow_path->GetEntryLabel());
3221 break;
3222 }
3223
3224 case TypeCheckKind::kArrayObjectCheck: {
3225 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003226 GenerateReferenceLoadTwoRegisters(instruction,
3227 temp_loc,
3228 obj_loc,
3229 class_offset,
3230 maybe_temp2_loc,
3231 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003232 // Do an exact check.
3233 __ Beq(temp, cls, &done);
3234 // Otherwise, we need to check that the object's class is a non-primitive array.
3235 // /* HeapReference<Class> */ temp = temp->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08003236 GenerateReferenceLoadOneRegister(instruction,
3237 temp_loc,
3238 component_offset,
3239 maybe_temp2_loc,
3240 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003241 // If the component type is null, jump to the slow path to throw the exception.
3242 __ Beqz(temp, slow_path->GetEntryLabel());
3243 // Otherwise, the object is indeed an array, further check that this component
3244 // type is not a primitive type.
3245 __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
3246 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3247 __ Bnez(temp, slow_path->GetEntryLabel());
3248 break;
3249 }
3250
3251 case TypeCheckKind::kUnresolvedCheck:
3252 // We always go into the type check slow path for the unresolved check case.
3253 // We cannot directly call the CheckCast runtime entry point
3254 // without resorting to a type checking slow path here (i.e. by
3255 // calling InvokeRuntime directly), as it would require assigning
3256 // fixed registers for the inputs of this HCheckCast
3257 // instruction (following the runtime calling convention), which
3258 // might be cluttered by the potential first read barrier
3259 // emission at the beginning of this method.
3260 __ B(slow_path->GetEntryLabel());
3261 break;
3262
3263 case TypeCheckKind::kInterfaceCheck: {
3264 // Avoid read barriers to improve performance of the fast path. We cannot get false
3265 // positives by doing this.
3266 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003267 GenerateReferenceLoadTwoRegisters(instruction,
3268 temp_loc,
3269 obj_loc,
3270 class_offset,
3271 maybe_temp2_loc,
3272 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003273 // /* HeapReference<Class> */ temp = temp->iftable_
Alexey Frunze15958152017-02-09 19:08:30 -08003274 GenerateReferenceLoadTwoRegisters(instruction,
3275 temp_loc,
3276 temp_loc,
3277 iftable_offset,
3278 maybe_temp2_loc,
3279 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003280 // Iftable is never null.
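// Each iftable entry is an (interface class, method array) pair, so the loop
// advances by 2 * kHeapReferenceSize per entry and decrements the length in
// TMP by 2 on every iteration.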
3281 __ Lw(TMP, temp, array_length_offset);
3282 // Loop through the iftable and check if any class matches.
3283 MipsLabel loop;
3284 __ Bind(&loop);
3285 __ Addiu(temp, temp, 2 * kHeapReferenceSize); // Possibly in delay slot on R2.
3286 __ Beqz(TMP, slow_path->GetEntryLabel());
3287 __ Lw(AT, temp, object_array_data_offset - 2 * kHeapReferenceSize);
3288 __ MaybeUnpoisonHeapReference(AT);
3289 // Go to next interface.
3290 __ Addiu(TMP, TMP, -2);
3291 // Compare the classes and continue the loop if they do not match.
3292 __ Bne(AT, cls, &loop);
3293 break;
3294 }
3295 }
3296
3297 __ Bind(&done);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003298 __ Bind(slow_path->GetExitLabel());
3299}
3300
3301void LocationsBuilderMIPS::VisitClinitCheck(HClinitCheck* check) {
3302 LocationSummary* locations =
3303 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3304 locations->SetInAt(0, Location::RequiresRegister());
3305 if (check->HasUses()) {
3306 locations->SetOut(Location::SameAsFirstInput());
3307 }
3308}
3309
3310void InstructionCodeGeneratorMIPS::VisitClinitCheck(HClinitCheck* check) {
3311 // We assume the class is not null.
3312 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
3313 check->GetLoadClass(),
3314 check,
3315 check->GetDexPc(),
3316 true);
3317 codegen_->AddSlowPath(slow_path);
3318 GenerateClassInitializationCheck(slow_path,
3319 check->GetLocations()->InAt(0).AsRegister<Register>());
3320}
3321
3322void LocationsBuilderMIPS::VisitCompare(HCompare* compare) {
3323 Primitive::Type in_type = compare->InputAt(0)->GetType();
3324
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003325 LocationSummary* locations =
3326 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003327
3328 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00003329 case Primitive::kPrimBoolean:
3330 case Primitive::kPrimByte:
3331 case Primitive::kPrimShort:
3332 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08003333 case Primitive::kPrimInt:
Alexey Frunzee7697712016-09-15 21:37:49 -07003334 locations->SetInAt(0, Location::RequiresRegister());
3335 locations->SetInAt(1, Location::RequiresRegister());
3336 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3337 break;
3338
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003339 case Primitive::kPrimLong:
3340 locations->SetInAt(0, Location::RequiresRegister());
3341 locations->SetInAt(1, Location::RequiresRegister());
3342 // Output overlaps because it is written before doing the low comparison.
3343 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
3344 break;
3345
3346 case Primitive::kPrimFloat:
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003347 case Primitive::kPrimDouble:
3348 locations->SetInAt(0, Location::RequiresFpuRegister());
3349 locations->SetInAt(1, Location::RequiresFpuRegister());
3350 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003351 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003352
3353 default:
3354 LOG(FATAL) << "Unexpected type for compare operation " << in_type;
3355 }
3356}
3357
3358void InstructionCodeGeneratorMIPS::VisitCompare(HCompare* instruction) {
3359 LocationSummary* locations = instruction->GetLocations();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003360 Register res = locations->Out().AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003361 Primitive::Type in_type = instruction->InputAt(0)->GetType();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003362 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003363
3364 // 0 if: left == right
3365 // 1 if: left > right
3366 // -1 if: left < right
3367 switch (in_type) {
Roland Levillaina5c4a402016-03-15 15:02:50 +00003368 case Primitive::kPrimBoolean:
3369 case Primitive::kPrimByte:
3370 case Primitive::kPrimShort:
3371 case Primitive::kPrimChar:
Aart Bika19616e2016-02-01 18:57:58 -08003372 case Primitive::kPrimInt: {
3373 Register lhs = locations->InAt(0).AsRegister<Register>();
3374 Register rhs = locations->InAt(1).AsRegister<Register>();
3375 __ Slt(TMP, lhs, rhs);
3376 __ Slt(res, rhs, lhs);
3377 __ Subu(res, res, TMP);
3378 break;
3379 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003380 case Primitive::kPrimLong: {
3381 MipsLabel done;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003382 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
3383 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
3384 Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
3385 Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();
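// Compare the high words first (signed); only if they are equal does the
// result depend on the low words, which are then compared unsigned.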
3386 // TODO: more efficient (direct) comparison with a constant.
3387 __ Slt(TMP, lhs_high, rhs_high);
3388 __ Slt(AT, rhs_high, lhs_high); // Inverted: is actually gt.
3389 __ Subu(res, AT, TMP); // Result -1:1:0 for [ <, >, == ].
3390 __ Bnez(res, &done); // If we compared ==, check if lower bits are also equal.
3391 __ Sltu(TMP, lhs_low, rhs_low);
3392 __ Sltu(AT, rhs_low, lhs_low); // Inverted: is actually gt.
3393 __ Subu(res, AT, TMP); // Result -1:1:0 for [ <, >, == ].
3394 __ Bind(&done);
3395 break;
3396 }
3397
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003398 case Primitive::kPrimFloat: {
Roland Levillain32ca3752016-02-17 16:49:37 +00003399 bool gt_bias = instruction->IsGtBias();
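// gt_bias selects the result for unordered (NaN) operands: with gt bias the
// compare yields 1 (cmpg semantics), otherwise -1 (cmpl semantics). Equal
// operands always yield 0.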
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003400 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
3401 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
3402 MipsLabel done;
3403 if (isR6) {
3404 __ CmpEqS(FTMP, lhs, rhs);
3405 __ LoadConst32(res, 0);
3406 __ Bc1nez(FTMP, &done);
3407 if (gt_bias) {
3408 __ CmpLtS(FTMP, lhs, rhs);
3409 __ LoadConst32(res, -1);
3410 __ Bc1nez(FTMP, &done);
3411 __ LoadConst32(res, 1);
3412 } else {
3413 __ CmpLtS(FTMP, rhs, lhs);
3414 __ LoadConst32(res, 1);
3415 __ Bc1nez(FTMP, &done);
3416 __ LoadConst32(res, -1);
3417 }
3418 } else {
3419 if (gt_bias) {
3420 __ ColtS(0, lhs, rhs);
3421 __ LoadConst32(res, -1);
3422 __ Bc1t(0, &done);
3423 __ CeqS(0, lhs, rhs);
3424 __ LoadConst32(res, 1);
3425 __ Movt(res, ZERO, 0);
3426 } else {
3427 __ ColtS(0, rhs, lhs);
3428 __ LoadConst32(res, 1);
3429 __ Bc1t(0, &done);
3430 __ CeqS(0, lhs, rhs);
3431 __ LoadConst32(res, -1);
3432 __ Movt(res, ZERO, 0);
3433 }
3434 }
3435 __ Bind(&done);
3436 break;
3437 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003438 case Primitive::kPrimDouble: {
Roland Levillain32ca3752016-02-17 16:49:37 +00003439 bool gt_bias = instruction->IsGtBias();
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003440 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
3441 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
3442 MipsLabel done;
3443 if (isR6) {
3444 __ CmpEqD(FTMP, lhs, rhs);
3445 __ LoadConst32(res, 0);
3446 __ Bc1nez(FTMP, &done);
3447 if (gt_bias) {
3448 __ CmpLtD(FTMP, lhs, rhs);
3449 __ LoadConst32(res, -1);
3450 __ Bc1nez(FTMP, &done);
3451 __ LoadConst32(res, 1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003452 } else {
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003453 __ CmpLtD(FTMP, rhs, lhs);
3454 __ LoadConst32(res, 1);
3455 __ Bc1nez(FTMP, &done);
3456 __ LoadConst32(res, -1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003457 }
3458 } else {
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003459 if (gt_bias) {
3460 __ ColtD(0, lhs, rhs);
3461 __ LoadConst32(res, -1);
3462 __ Bc1t(0, &done);
3463 __ CeqD(0, lhs, rhs);
3464 __ LoadConst32(res, 1);
3465 __ Movt(res, ZERO, 0);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003466 } else {
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003467 __ ColtD(0, rhs, lhs);
3468 __ LoadConst32(res, 1);
3469 __ Bc1t(0, &done);
3470 __ CeqD(0, lhs, rhs);
3471 __ LoadConst32(res, -1);
3472 __ Movt(res, ZERO, 0);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003473 }
3474 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003475 __ Bind(&done);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003476 break;
3477 }
3478
3479 default:
3480 LOG(FATAL) << "Unimplemented compare type " << in_type;
3481 }
3482}
3483
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003484void LocationsBuilderMIPS::HandleCondition(HCondition* instruction) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003485 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003486 switch (instruction->InputAt(0)->GetType()) {
3487 default:
3488 case Primitive::kPrimLong:
3489 locations->SetInAt(0, Location::RequiresRegister());
3490 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3491 break;
3492
3493 case Primitive::kPrimFloat:
3494 case Primitive::kPrimDouble:
3495 locations->SetInAt(0, Location::RequiresFpuRegister());
3496 locations->SetInAt(1, Location::RequiresFpuRegister());
3497 break;
3498 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003499 if (!instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003500 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3501 }
3502}
3503
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003504void InstructionCodeGeneratorMIPS::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003505 if (instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003506 return;
3507 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003508
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003509 Primitive::Type type = instruction->InputAt(0)->GetType();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003510 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003511
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003512 switch (type) {
3513 default:
3514 // Integer case.
3515 GenerateIntCompare(instruction->GetCondition(), locations);
3516 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003517
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003518 case Primitive::kPrimLong:
Tijana Jakovljevic6d482aa2017-02-03 13:24:08 +01003519 GenerateLongCompare(instruction->GetCondition(), locations);
3520 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003521
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003522 case Primitive::kPrimFloat:
3523 case Primitive::kPrimDouble:
Alexey Frunze2ddb7172016-09-06 17:04:55 -07003524 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3525 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003526 }
3527}
3528
Alexey Frunze7e99e052015-11-24 19:28:01 -08003529void InstructionCodeGeneratorMIPS::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3530 DCHECK(instruction->IsDiv() || instruction->IsRem());
3531 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3532
3533 LocationSummary* locations = instruction->GetLocations();
3534 Location second = locations->InAt(1);
3535 DCHECK(second.IsConstant());
3536
3537 Register out = locations->Out().AsRegister<Register>();
3538 Register dividend = locations->InAt(0).AsRegister<Register>();
3539 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3540 DCHECK(imm == 1 || imm == -1);
3541
3542 if (instruction->IsRem()) {
3543 __ Move(out, ZERO);
3544 } else {
3545 if (imm == -1) {
3546 __ Subu(out, ZERO, dividend);
3547 } else if (out != dividend) {
3548 __ Move(out, dividend);
3549 }
3550 }
3551}
3552
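// The power-of-two helper below strength-reduces the division to shifts. A
// signed division by +/-2^k is an arithmetic shift by k only if the quotient
// is rounded toward zero, so the Sra/Srl pair derives a bias of (2^k - 1) from
// the sign bit and adds it to the dividend before shifting. For example, with
// dividend == -7 and imm == +4 (ctz_imm == 2) the bias is 3 and
// (-7 + 3) >> 2 == -1, matching -7 / 4 under Java/C truncating semantics. The
// general remainder path adds the same bias, masks the low k bits and then
// subtracts the bias again, producing a remainder with the sign of the
// dividend.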
3553void InstructionCodeGeneratorMIPS::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
3554 DCHECK(instruction->IsDiv() || instruction->IsRem());
3555 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3556
3557 LocationSummary* locations = instruction->GetLocations();
3558 Location second = locations->InAt(1);
3559 DCHECK(second.IsConstant());
3560
3561 Register out = locations->Out().AsRegister<Register>();
3562 Register dividend = locations->InAt(0).AsRegister<Register>();
3563 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3564 uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
3565 int ctz_imm = CTZ(abs_imm);
3566
3567 if (instruction->IsDiv()) {
3568 if (ctz_imm == 1) {
3569 // Fast path for division by +/-2, which is very common.
3570 __ Srl(TMP, dividend, 31);
3571 } else {
3572 __ Sra(TMP, dividend, 31);
3573 __ Srl(TMP, TMP, 32 - ctz_imm);
3574 }
3575 __ Addu(out, dividend, TMP);
3576 __ Sra(out, out, ctz_imm);
3577 if (imm < 0) {
3578 __ Subu(out, ZERO, out);
3579 }
3580 } else {
3581 if (ctz_imm == 1) {
3582 // Fast path for modulo +/-2, which is very common.
3583 __ Sra(TMP, dividend, 31);
3584 __ Subu(out, dividend, TMP);
3585 __ Andi(out, out, 1);
3586 __ Addu(out, out, TMP);
3587 } else {
3588 __ Sra(TMP, dividend, 31);
3589 __ Srl(TMP, TMP, 32 - ctz_imm);
3590 __ Addu(out, dividend, TMP);
3591 if (IsUint<16>(abs_imm - 1)) {
3592 __ Andi(out, out, abs_imm - 1);
3593 } else {
3594 __ Sll(out, out, 32 - ctz_imm);
3595 __ Srl(out, out, 32 - ctz_imm);
3596 }
3597 __ Subu(out, out, TMP);
3598 }
3599 }
3600}
3601
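// For the remaining constant divisors the division is turned into a multiply:
// CalculateMagicAndShiftForDivRem returns a "magic" constant approximating the
// scaled reciprocal plus a shift amount (the standard magic-number scheme).
// The code keeps the high 32 bits of dividend * magic (muh on R6, mult/mfhi on
// R2), applies a correction of +/-dividend depending on the signs of the
// immediate and of the magic value, shifts, and finally adds the sign bit of
// the shifted value (the usual correction so negative quotients round toward
// zero). The remainder is then computed as dividend - quotient * imm.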
3602void InstructionCodeGeneratorMIPS::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
3603 DCHECK(instruction->IsDiv() || instruction->IsRem());
3604 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3605
3606 LocationSummary* locations = instruction->GetLocations();
3607 Location second = locations->InAt(1);
3608 DCHECK(second.IsConstant());
3609
3610 Register out = locations->Out().AsRegister<Register>();
3611 Register dividend = locations->InAt(0).AsRegister<Register>();
3612 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3613
3614 int64_t magic;
3615 int shift;
3616 CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);
3617
3618 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
3619
3620 __ LoadConst32(TMP, magic);
3621 if (isR6) {
3622 __ MuhR6(TMP, dividend, TMP);
3623 } else {
3624 __ MultR2(dividend, TMP);
3625 __ Mfhi(TMP);
3626 }
3627 if (imm > 0 && magic < 0) {
3628 __ Addu(TMP, TMP, dividend);
3629 } else if (imm < 0 && magic > 0) {
3630 __ Subu(TMP, TMP, dividend);
3631 }
3632
3633 if (shift != 0) {
3634 __ Sra(TMP, TMP, shift);
3635 }
3636
3637 if (instruction->IsDiv()) {
3638 __ Sra(out, TMP, 31);
3639 __ Subu(out, TMP, out);
3640 } else {
3641 __ Sra(AT, TMP, 31);
3642 __ Subu(AT, TMP, AT);
3643 __ LoadConst32(TMP, imm);
3644 if (isR6) {
3645 __ MulR6(TMP, AT, TMP);
3646 } else {
3647 __ MulR2(TMP, AT, TMP);
3648 }
3649 __ Subu(out, dividend, TMP);
3650 }
3651}
3652
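// GenerateDivRemIntegral dispatches on the divisor: constant divisors use the
// strength-reduced helpers above (nothing for 0 because HDivZeroCheck already
// throws, a move or negation for +/-1, shifts for powers of two, and the
// magic-number multiply otherwise), while a register divisor falls through to
// the hardware divide via the assembler's R2 or R6 Div/Mod helpers.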
3653void InstructionCodeGeneratorMIPS::GenerateDivRemIntegral(HBinaryOperation* instruction) {
3654 DCHECK(instruction->IsDiv() || instruction->IsRem());
3655 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3656
3657 LocationSummary* locations = instruction->GetLocations();
3658 Register out = locations->Out().AsRegister<Register>();
3659 Location second = locations->InAt(1);
3660
3661 if (second.IsConstant()) {
3662 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3663 if (imm == 0) {
3664 // Do not generate anything. DivZeroCheck would prevent any code from being executed.
3665 } else if (imm == 1 || imm == -1) {
3666 DivRemOneOrMinusOne(instruction);
3667 } else if (IsPowerOfTwo(AbsOrMin(imm))) {
3668 DivRemByPowerOfTwo(instruction);
3669 } else {
3670 DCHECK(imm <= -2 || imm >= 2);
3671 GenerateDivRemWithAnyConstant(instruction);
3672 }
3673 } else {
3674 Register dividend = locations->InAt(0).AsRegister<Register>();
3675 Register divisor = second.AsRegister<Register>();
3676 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
3677 if (instruction->IsDiv()) {
3678 if (isR6) {
3679 __ DivR6(out, dividend, divisor);
3680 } else {
3681 __ DivR2(out, dividend, divisor);
3682 }
3683 } else {
3684 if (isR6) {
3685 __ ModR6(out, dividend, divisor);
3686 } else {
3687 __ ModR2(out, dividend, divisor);
3688 }
3689 }
3690 }
3691}
3692
3693void LocationsBuilderMIPS::VisitDiv(HDiv* div) {
3694 Primitive::Type type = div->GetResultType();
3695 LocationSummary::CallKind call_kind = (type == Primitive::kPrimLong)
3696 ? LocationSummary::kCallOnMainOnly
3697 : LocationSummary::kNoCall;
3698
3699 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
3700
3701 switch (type) {
3702 case Primitive::kPrimInt:
3703 locations->SetInAt(0, Location::RequiresRegister());
3704 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
3705 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3706 break;
3707
3708 case Primitive::kPrimLong: {
3709 InvokeRuntimeCallingConvention calling_convention;
3710 locations->SetInAt(0, Location::RegisterPairLocation(
3711 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3712 locations->SetInAt(1, Location::RegisterPairLocation(
3713 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3714 locations->SetOut(calling_convention.GetReturnLocation(type));
3715 break;
3716 }
3717
3718 case Primitive::kPrimFloat:
3719 case Primitive::kPrimDouble:
3720 locations->SetInAt(0, Location::RequiresFpuRegister());
3721 locations->SetInAt(1, Location::RequiresFpuRegister());
3722 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3723 break;
3724
3725 default:
3726 LOG(FATAL) << "Unexpected div type " << type;
3727 }
3728}
3729
3730void InstructionCodeGeneratorMIPS::VisitDiv(HDiv* instruction) {
3731 Primitive::Type type = instruction->GetType();
3732 LocationSummary* locations = instruction->GetLocations();
3733
3734 switch (type) {
3735 case Primitive::kPrimInt:
3736 GenerateDivRemIntegral(instruction);
3737 break;
3738 case Primitive::kPrimLong: {
3739 codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
3740 CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
3741 break;
3742 }
3743 case Primitive::kPrimFloat:
3744 case Primitive::kPrimDouble: {
3745 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
3746 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
3747 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
3748 if (type == Primitive::kPrimFloat) {
3749 __ DivS(dst, lhs, rhs);
3750 } else {
3751 __ DivD(dst, lhs, rhs);
3752 }
3753 break;
3754 }
3755 default:
3756 LOG(FATAL) << "Unexpected div type " << type;
3757 }
3758}
3759
3760void LocationsBuilderMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3761 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
3762 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
3763}
3764
3765void InstructionCodeGeneratorMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
3766 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS(instruction);
3767 codegen_->AddSlowPath(slow_path);
3768 Location value = instruction->GetLocations()->InAt(0);
3769 Primitive::Type type = instruction->GetType();
3770
3771 switch (type) {
3772 case Primitive::kPrimBoolean:
3773 case Primitive::kPrimByte:
3774 case Primitive::kPrimChar:
3775 case Primitive::kPrimShort:
3776 case Primitive::kPrimInt: {
3777 if (value.IsConstant()) {
3778 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
3779 __ B(slow_path->GetEntryLabel());
3780 } else {
3781 // A division by a non-zero constant is valid. We don't need to perform
3782 // any check, so simply fall through.
3783 }
3784 } else {
3785 DCHECK(value.IsRegister()) << value;
3786 __ Beqz(value.AsRegister<Register>(), slow_path->GetEntryLabel());
3787 }
3788 break;
3789 }
3790 case Primitive::kPrimLong: {
3791 if (value.IsConstant()) {
3792 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
3793 __ B(slow_path->GetEntryLabel());
3794 } else {
3795 // A division by a non-zero constant is valid. We don't need to perform
3796 // any check, so simply fall through.
3797 }
3798 } else {
3799 DCHECK(value.IsRegisterPair()) << value;
3800 __ Or(TMP, value.AsRegisterPairHigh<Register>(), value.AsRegisterPairLow<Register>());
3801 __ Beqz(TMP, slow_path->GetEntryLabel());
3802 }
3803 break;
3804 }
3805 default:
3806 LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
3807 }
3808}
3809
3810void LocationsBuilderMIPS::VisitDoubleConstant(HDoubleConstant* constant) {
3811 LocationSummary* locations =
3812 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3813 locations->SetOut(Location::ConstantLocation(constant));
3814}
3815
3816void InstructionCodeGeneratorMIPS::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
3817 // Will be generated at use site.
3818}
3819
3820void LocationsBuilderMIPS::VisitExit(HExit* exit) {
3821 exit->SetLocations(nullptr);
3822}
3823
3824void InstructionCodeGeneratorMIPS::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
3825}
3826
3827void LocationsBuilderMIPS::VisitFloatConstant(HFloatConstant* constant) {
3828 LocationSummary* locations =
3829 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
3830 locations->SetOut(Location::ConstantLocation(constant));
3831}
3832
3833void InstructionCodeGeneratorMIPS::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
3834 // Will be generated at use site.
3835}
3836
3837void LocationsBuilderMIPS::VisitGoto(HGoto* got) {
3838 got->SetLocations(nullptr);
3839}
3840
3841void InstructionCodeGeneratorMIPS::HandleGoto(HInstruction* got, HBasicBlock* successor) {
3842 DCHECK(!successor->IsExitBlock());
3843 HBasicBlock* block = got->GetBlock();
3844 HInstruction* previous = got->GetPrevious();
3845 HLoopInformation* info = block->GetLoopInformation();
3846
3847 if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
3848 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
3849 GenerateSuspendCheck(info->GetSuspendCheck(), successor);
3850 return;
3851 }
3852 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
3853 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
3854 }
3855 if (!codegen_->GoesToNextBlock(block, successor)) {
3856 __ B(codegen_->GetLabelOf(successor));
3857 }
3858}
3859
3860void InstructionCodeGeneratorMIPS::VisitGoto(HGoto* got) {
3861 HandleGoto(got, got->GetSuccessor());
3862}
3863
3864void LocationsBuilderMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
3865 try_boundary->SetLocations(nullptr);
3866}
3867
3868void InstructionCodeGeneratorMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
3869 HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
3870 if (!successor->IsExitBlock()) {
3871 HandleGoto(try_boundary, successor);
3872 }
3873}
3874
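// GenerateIntCompare materializes an integer condition as 0 or 1 in dst. MIPS
// only has slt/sltu (and their immediate forms), so the other relations are
// derived from them: EQ/NE fold both operands into one value (addiu/xori/xor)
// and test it against zero, GE/GT/AE/A are either the xori-negation or the
// swapped-operand form of LT/LE/B/BE, and "lhs <= imm" becomes "lhs < imm + 1"
// whenever the incremented immediate still fits in 16 bits.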
3875void InstructionCodeGeneratorMIPS::GenerateIntCompare(IfCondition cond,
3876 LocationSummary* locations) {
3877 Register dst = locations->Out().AsRegister<Register>();
3878 Register lhs = locations->InAt(0).AsRegister<Register>();
3879 Location rhs_location = locations->InAt(1);
3880 Register rhs_reg = ZERO;
3881 int64_t rhs_imm = 0;
3882 bool use_imm = rhs_location.IsConstant();
3883 if (use_imm) {
3884 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3885 } else {
3886 rhs_reg = rhs_location.AsRegister<Register>();
3887 }
3888
3889 switch (cond) {
3890 case kCondEQ:
3891 case kCondNE:
3892 if (use_imm && IsInt<16>(-rhs_imm)) {
3893 if (rhs_imm == 0) {
3894 if (cond == kCondEQ) {
3895 __ Sltiu(dst, lhs, 1);
3896 } else {
3897 __ Sltu(dst, ZERO, lhs);
3898 }
3899 } else {
3900 __ Addiu(dst, lhs, -rhs_imm);
3901 if (cond == kCondEQ) {
3902 __ Sltiu(dst, dst, 1);
3903 } else {
3904 __ Sltu(dst, ZERO, dst);
3905 }
3906 }
3907 } else {
3908 if (use_imm && IsUint<16>(rhs_imm)) {
3909 __ Xori(dst, lhs, rhs_imm);
3910 } else {
3911 if (use_imm) {
3912 rhs_reg = TMP;
3913 __ LoadConst32(rhs_reg, rhs_imm);
3914 }
3915 __ Xor(dst, lhs, rhs_reg);
3916 }
3917 if (cond == kCondEQ) {
3918 __ Sltiu(dst, dst, 1);
3919 } else {
3920 __ Sltu(dst, ZERO, dst);
3921 }
3922 }
3923 break;
3924
3925 case kCondLT:
3926 case kCondGE:
3927 if (use_imm && IsInt<16>(rhs_imm)) {
3928 __ Slti(dst, lhs, rhs_imm);
3929 } else {
3930 if (use_imm) {
3931 rhs_reg = TMP;
3932 __ LoadConst32(rhs_reg, rhs_imm);
3933 }
3934 __ Slt(dst, lhs, rhs_reg);
3935 }
3936 if (cond == kCondGE) {
3937 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3938 // only the slt instruction but no sge.
3939 __ Xori(dst, dst, 1);
3940 }
3941 break;
3942
3943 case kCondLE:
3944 case kCondGT:
3945 if (use_imm && IsInt<16>(rhs_imm + 1)) {
3946 // Simulate lhs <= rhs via lhs < rhs + 1.
3947 __ Slti(dst, lhs, rhs_imm + 1);
3948 if (cond == kCondGT) {
3949 // Simulate lhs > rhs via !(lhs <= rhs) since there's
3950 // only the slti instruction but no sgti.
3951 __ Xori(dst, dst, 1);
3952 }
3953 } else {
3954 if (use_imm) {
3955 rhs_reg = TMP;
3956 __ LoadConst32(rhs_reg, rhs_imm);
3957 }
3958 __ Slt(dst, rhs_reg, lhs);
3959 if (cond == kCondLE) {
3960 // Simulate lhs <= rhs via !(rhs < lhs) since there's
3961 // only the slt instruction but no sle.
3962 __ Xori(dst, dst, 1);
3963 }
3964 }
3965 break;
3966
3967 case kCondB:
3968 case kCondAE:
3969 if (use_imm && IsInt<16>(rhs_imm)) {
3970 // Sltiu sign-extends its 16-bit immediate operand before
3971 // the comparison and thus lets us compare directly with
3972 // unsigned values in the ranges [0, 0x7fff] and
3973 // [0xffff8000, 0xffffffff].
3974 __ Sltiu(dst, lhs, rhs_imm);
3975 } else {
3976 if (use_imm) {
3977 rhs_reg = TMP;
3978 __ LoadConst32(rhs_reg, rhs_imm);
3979 }
3980 __ Sltu(dst, lhs, rhs_reg);
3981 }
3982 if (cond == kCondAE) {
3983 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3984 // only the sltu instruction but no sgeu.
3985 __ Xori(dst, dst, 1);
3986 }
3987 break;
3988
3989 case kCondBE:
3990 case kCondA:
3991 if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
3992 // Simulate lhs <= rhs via lhs < rhs + 1.
3993 // Note that this only works if rhs + 1 does not overflow
3994 // to 0, hence the check above.
3995 // Sltiu sign-extends its 16-bit immediate operand before
3996 // the comparison and thus lets us compare directly with
3997 // unsigned values in the ranges [0, 0x7fff] and
3998 // [0xffff8000, 0xffffffff].
3999 __ Sltiu(dst, lhs, rhs_imm + 1);
4000 if (cond == kCondA) {
4001 // Simulate lhs > rhs via !(lhs <= rhs) since there's
4002 // only the sltiu instruction but no sgtiu.
4003 __ Xori(dst, dst, 1);
4004 }
4005 } else {
4006 if (use_imm) {
4007 rhs_reg = TMP;
4008 __ LoadConst32(rhs_reg, rhs_imm);
4009 }
4010 __ Sltu(dst, rhs_reg, lhs);
4011 if (cond == kCondBE) {
4012 // Simulate lhs <= rhs via !(rhs < lhs) since there's
4013 // only the sltu instruction but no sleu.
4014 __ Xori(dst, dst, 1);
4015 }
4016 }
4017 break;
4018 }
4019}
4020
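// MaterializeIntCompare computes the same relations as GenerateIntCompare but
// lets the caller absorb the final negation instead of emitting a trailing
// xori. The return value tells the caller how to read dst: false means the
// condition holds when dst is non-zero, true means the sense is inverted and
// the condition holds when dst is zero (for example EQ and GE always return
// true, while LE/GT and BE/A return whichever sense matched the cheaper
// slt/slti form).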
4021bool InstructionCodeGeneratorMIPS::MaterializeIntCompare(IfCondition cond,
4022 LocationSummary* input_locations,
4023 Register dst) {
4024 Register lhs = input_locations->InAt(0).AsRegister<Register>();
4025 Location rhs_location = input_locations->InAt(1);
4026 Register rhs_reg = ZERO;
4027 int64_t rhs_imm = 0;
4028 bool use_imm = rhs_location.IsConstant();
4029 if (use_imm) {
4030 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
4031 } else {
4032 rhs_reg = rhs_location.AsRegister<Register>();
4033 }
4034
4035 switch (cond) {
4036 case kCondEQ:
4037 case kCondNE:
4038 if (use_imm && IsInt<16>(-rhs_imm)) {
4039 __ Addiu(dst, lhs, -rhs_imm);
4040 } else if (use_imm && IsUint<16>(rhs_imm)) {
4041 __ Xori(dst, lhs, rhs_imm);
4042 } else {
4043 if (use_imm) {
4044 rhs_reg = TMP;
4045 __ LoadConst32(rhs_reg, rhs_imm);
4046 }
4047 __ Xor(dst, lhs, rhs_reg);
4048 }
4049 return (cond == kCondEQ);
4050
4051 case kCondLT:
4052 case kCondGE:
4053 if (use_imm && IsInt<16>(rhs_imm)) {
4054 __ Slti(dst, lhs, rhs_imm);
4055 } else {
4056 if (use_imm) {
4057 rhs_reg = TMP;
4058 __ LoadConst32(rhs_reg, rhs_imm);
4059 }
4060 __ Slt(dst, lhs, rhs_reg);
4061 }
4062 return (cond == kCondGE);
4063
4064 case kCondLE:
4065 case kCondGT:
4066 if (use_imm && IsInt<16>(rhs_imm + 1)) {
4067 // Simulate lhs <= rhs via lhs < rhs + 1.
4068 __ Slti(dst, lhs, rhs_imm + 1);
4069 return (cond == kCondGT);
4070 } else {
4071 if (use_imm) {
4072 rhs_reg = TMP;
4073 __ LoadConst32(rhs_reg, rhs_imm);
4074 }
4075 __ Slt(dst, rhs_reg, lhs);
4076 return (cond == kCondLE);
4077 }
4078
4079 case kCondB:
4080 case kCondAE:
4081 if (use_imm && IsInt<16>(rhs_imm)) {
4082 // Sltiu sign-extends its 16-bit immediate operand before
4083 // the comparison and thus lets us compare directly with
4084 // unsigned values in the ranges [0, 0x7fff] and
4085 // [0xffff8000, 0xffffffff].
4086 __ Sltiu(dst, lhs, rhs_imm);
4087 } else {
4088 if (use_imm) {
4089 rhs_reg = TMP;
4090 __ LoadConst32(rhs_reg, rhs_imm);
4091 }
4092 __ Sltu(dst, lhs, rhs_reg);
4093 }
4094 return (cond == kCondAE);
4095
4096 case kCondBE:
4097 case kCondA:
4098 if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
4099 // Simulate lhs <= rhs via lhs < rhs + 1.
4100 // Note that this only works if rhs + 1 does not overflow
4101 // to 0, hence the check above.
4102 // Sltiu sign-extends its 16-bit immediate operand before
4103 // the comparison and thus lets us compare directly with
4104 // unsigned values in the ranges [0, 0x7fff] and
4105 // [0xffff8000, 0xffffffff].
4106 __ Sltiu(dst, lhs, rhs_imm + 1);
4107 return (cond == kCondA);
4108 } else {
4109 if (use_imm) {
4110 rhs_reg = TMP;
4111 __ LoadConst32(rhs_reg, rhs_imm);
4112 }
4113 __ Sltu(dst, rhs_reg, lhs);
4114 return (cond == kCondBE);
4115 }
4116 }
4117}
4118
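// Fused compare-and-branch for 32-bit values. A comparison against constant
// zero maps directly onto the zero-compare branches (beqz/bnez/bltz/bgez/
// blez/bgtz); note that an unsigned "< 0" never branches and an unsigned
// ">= 0" branches unconditionally. Otherwise R6 (and the register case) loads
// the constant and uses the two-register branch macros, while the R2 path
// prefers an slti/sltiu followed by bnez/beqz when the immediate fits in
// 16 bits, avoiding the LoadConst32 into TMP.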
4119void InstructionCodeGeneratorMIPS::GenerateIntCompareAndBranch(IfCondition cond,
4120 LocationSummary* locations,
4121 MipsLabel* label) {
4122 Register lhs = locations->InAt(0).AsRegister<Register>();
4123 Location rhs_location = locations->InAt(1);
4124 Register rhs_reg = ZERO;
4125 int64_t rhs_imm = 0;
4126 bool use_imm = rhs_location.IsConstant();
4127 if (use_imm) {
4128 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
4129 } else {
4130 rhs_reg = rhs_location.AsRegister<Register>();
4131 }
4132
4133 if (use_imm && rhs_imm == 0) {
4134 switch (cond) {
4135 case kCondEQ:
4136 case kCondBE: // <= 0 if zero
4137 __ Beqz(lhs, label);
4138 break;
4139 case kCondNE:
4140 case kCondA: // > 0 if non-zero
4141 __ Bnez(lhs, label);
4142 break;
4143 case kCondLT:
4144 __ Bltz(lhs, label);
4145 break;
4146 case kCondGE:
4147 __ Bgez(lhs, label);
4148 break;
4149 case kCondLE:
4150 __ Blez(lhs, label);
4151 break;
4152 case kCondGT:
4153 __ Bgtz(lhs, label);
4154 break;
4155 case kCondB: // always false
4156 break;
4157 case kCondAE: // always true
4158 __ B(label);
4159 break;
4160 }
4161 } else {
4162 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
4163 if (isR6 || !use_imm) {
4164 if (use_imm) {
4165 rhs_reg = TMP;
4166 __ LoadConst32(rhs_reg, rhs_imm);
4167 }
4168 switch (cond) {
4169 case kCondEQ:
4170 __ Beq(lhs, rhs_reg, label);
4171 break;
4172 case kCondNE:
4173 __ Bne(lhs, rhs_reg, label);
4174 break;
4175 case kCondLT:
4176 __ Blt(lhs, rhs_reg, label);
4177 break;
4178 case kCondGE:
4179 __ Bge(lhs, rhs_reg, label);
4180 break;
4181 case kCondLE:
4182 __ Bge(rhs_reg, lhs, label);
4183 break;
4184 case kCondGT:
4185 __ Blt(rhs_reg, lhs, label);
4186 break;
4187 case kCondB:
4188 __ Bltu(lhs, rhs_reg, label);
4189 break;
4190 case kCondAE:
4191 __ Bgeu(lhs, rhs_reg, label);
4192 break;
4193 case kCondBE:
4194 __ Bgeu(rhs_reg, lhs, label);
4195 break;
4196 case kCondA:
4197 __ Bltu(rhs_reg, lhs, label);
4198 break;
4199 }
4200 } else {
4201 // Special cases for more efficient comparison with constants on R2.
4202 switch (cond) {
4203 case kCondEQ:
4204 __ LoadConst32(TMP, rhs_imm);
4205 __ Beq(lhs, TMP, label);
4206 break;
4207 case kCondNE:
4208 __ LoadConst32(TMP, rhs_imm);
4209 __ Bne(lhs, TMP, label);
4210 break;
4211 case kCondLT:
4212 if (IsInt<16>(rhs_imm)) {
4213 __ Slti(TMP, lhs, rhs_imm);
4214 __ Bnez(TMP, label);
4215 } else {
4216 __ LoadConst32(TMP, rhs_imm);
4217 __ Blt(lhs, TMP, label);
4218 }
4219 break;
4220 case kCondGE:
4221 if (IsInt<16>(rhs_imm)) {
4222 __ Slti(TMP, lhs, rhs_imm);
4223 __ Beqz(TMP, label);
4224 } else {
4225 __ LoadConst32(TMP, rhs_imm);
4226 __ Bge(lhs, TMP, label);
4227 }
4228 break;
4229 case kCondLE:
4230 if (IsInt<16>(rhs_imm + 1)) {
4231 // Simulate lhs <= rhs via lhs < rhs + 1.
4232 __ Slti(TMP, lhs, rhs_imm + 1);
4233 __ Bnez(TMP, label);
4234 } else {
4235 __ LoadConst32(TMP, rhs_imm);
4236 __ Bge(TMP, lhs, label);
4237 }
4238 break;
4239 case kCondGT:
4240 if (IsInt<16>(rhs_imm + 1)) {
4241 // Simulate lhs > rhs via !(lhs < rhs + 1).
4242 __ Slti(TMP, lhs, rhs_imm + 1);
4243 __ Beqz(TMP, label);
4244 } else {
4245 __ LoadConst32(TMP, rhs_imm);
4246 __ Blt(TMP, lhs, label);
4247 }
4248 break;
4249 case kCondB:
4250 if (IsInt<16>(rhs_imm)) {
4251 __ Sltiu(TMP, lhs, rhs_imm);
4252 __ Bnez(TMP, label);
4253 } else {
4254 __ LoadConst32(TMP, rhs_imm);
4255 __ Bltu(lhs, TMP, label);
4256 }
4257 break;
4258 case kCondAE:
4259 if (IsInt<16>(rhs_imm)) {
4260 __ Sltiu(TMP, lhs, rhs_imm);
4261 __ Beqz(TMP, label);
4262 } else {
4263 __ LoadConst32(TMP, rhs_imm);
4264 __ Bgeu(lhs, TMP, label);
4265 }
4266 break;
4267 case kCondBE:
4268 if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
4269 // Simulate lhs <= rhs via lhs < rhs + 1.
4270 // Note that this only works if rhs + 1 does not overflow
4271 // to 0, hence the check above.
4272 __ Sltiu(TMP, lhs, rhs_imm + 1);
4273 __ Bnez(TMP, label);
4274 } else {
4275 __ LoadConst32(TMP, rhs_imm);
4276 __ Bgeu(TMP, lhs, label);
4277 }
4278 break;
4279 case kCondA:
4280 if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
4281 // Simulate lhs > rhs via !(lhs < rhs + 1).
4282 // Note that this only works if rhs + 1 does not overflow
4283 // to 0, hence the check above.
4284 __ Sltiu(TMP, lhs, rhs_imm + 1);
4285 __ Beqz(TMP, label);
4286 } else {
4287 __ LoadConst32(TMP, rhs_imm);
4288 __ Bltu(TMP, lhs, label);
4289 }
4290 break;
4291 }
4292 }
4293 }
4294}
4295
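// 64-bit compares are built from the 32-bit register halves. Equality xors
// both halves and ors the results; the ordered relations combine a compare of
// the high words with an unsigned compare of the low words, in effect
//   dst = (lhs_hi < rhs_hi) | ((lhs_hi <= rhs_hi) & (lhs_lo <u rhs_lo))
// for signed less-than (the high-word compare is unsigned for B/BE/A/AE), and
// the opposite relations add a final xori. Comparisons against zero and other
// constants get shorter dedicated sequences.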
4296void InstructionCodeGeneratorMIPS::GenerateLongCompare(IfCondition cond,
4297 LocationSummary* locations) {
4298 Register dst = locations->Out().AsRegister<Register>();
4299 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
4300 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
4301 Location rhs_location = locations->InAt(1);
4302 Register rhs_high = ZERO;
4303 Register rhs_low = ZERO;
4304 int64_t imm = 0;
4305 uint32_t imm_high = 0;
4306 uint32_t imm_low = 0;
4307 bool use_imm = rhs_location.IsConstant();
4308 if (use_imm) {
4309 imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
4310 imm_high = High32Bits(imm);
4311 imm_low = Low32Bits(imm);
4312 } else {
4313 rhs_high = rhs_location.AsRegisterPairHigh<Register>();
4314 rhs_low = rhs_location.AsRegisterPairLow<Register>();
4315 }
4316 if (use_imm && imm == 0) {
4317 switch (cond) {
4318 case kCondEQ:
4319 case kCondBE: // <= 0 if zero
4320 __ Or(dst, lhs_high, lhs_low);
4321 __ Sltiu(dst, dst, 1);
4322 break;
4323 case kCondNE:
4324 case kCondA: // > 0 if non-zero
4325 __ Or(dst, lhs_high, lhs_low);
4326 __ Sltu(dst, ZERO, dst);
4327 break;
4328 case kCondLT:
4329 __ Slt(dst, lhs_high, ZERO);
4330 break;
4331 case kCondGE:
4332 __ Slt(dst, lhs_high, ZERO);
4333 __ Xori(dst, dst, 1);
4334 break;
4335 case kCondLE:
4336 __ Or(TMP, lhs_high, lhs_low);
4337 __ Sra(AT, lhs_high, 31);
4338 __ Sltu(dst, AT, TMP);
4339 __ Xori(dst, dst, 1);
4340 break;
4341 case kCondGT:
4342 __ Or(TMP, lhs_high, lhs_low);
4343 __ Sra(AT, lhs_high, 31);
4344 __ Sltu(dst, AT, TMP);
4345 break;
4346 case kCondB: // always false
4347 __ Andi(dst, dst, 0);
4348 break;
4349 case kCondAE: // always true
4350 __ Ori(dst, ZERO, 1);
4351 break;
4352 }
4353 } else if (use_imm) {
4354 // TODO: more efficient comparison with constants without loading them into TMP/AT.
4355 switch (cond) {
4356 case kCondEQ:
4357 __ LoadConst32(TMP, imm_high);
4358 __ Xor(TMP, TMP, lhs_high);
4359 __ LoadConst32(AT, imm_low);
4360 __ Xor(AT, AT, lhs_low);
4361 __ Or(dst, TMP, AT);
4362 __ Sltiu(dst, dst, 1);
4363 break;
4364 case kCondNE:
4365 __ LoadConst32(TMP, imm_high);
4366 __ Xor(TMP, TMP, lhs_high);
4367 __ LoadConst32(AT, imm_low);
4368 __ Xor(AT, AT, lhs_low);
4369 __ Or(dst, TMP, AT);
4370 __ Sltu(dst, ZERO, dst);
4371 break;
4372 case kCondLT:
4373 case kCondGE:
4374 if (dst == lhs_low) {
4375 __ LoadConst32(TMP, imm_low);
4376 __ Sltu(dst, lhs_low, TMP);
4377 }
4378 __ LoadConst32(TMP, imm_high);
4379 __ Slt(AT, lhs_high, TMP);
4380 __ Slt(TMP, TMP, lhs_high);
4381 if (dst != lhs_low) {
4382 __ LoadConst32(dst, imm_low);
4383 __ Sltu(dst, lhs_low, dst);
4384 }
4385 __ Slt(dst, TMP, dst);
4386 __ Or(dst, dst, AT);
4387 if (cond == kCondGE) {
4388 __ Xori(dst, dst, 1);
4389 }
4390 break;
4391 case kCondGT:
4392 case kCondLE:
4393 if (dst == lhs_low) {
4394 __ LoadConst32(TMP, imm_low);
4395 __ Sltu(dst, TMP, lhs_low);
4396 }
4397 __ LoadConst32(TMP, imm_high);
4398 __ Slt(AT, TMP, lhs_high);
4399 __ Slt(TMP, lhs_high, TMP);
4400 if (dst != lhs_low) {
4401 __ LoadConst32(dst, imm_low);
4402 __ Sltu(dst, dst, lhs_low);
4403 }
4404 __ Slt(dst, TMP, dst);
4405 __ Or(dst, dst, AT);
4406 if (cond == kCondLE) {
4407 __ Xori(dst, dst, 1);
4408 }
4409 break;
4410 case kCondB:
4411 case kCondAE:
4412 if (dst == lhs_low) {
4413 __ LoadConst32(TMP, imm_low);
4414 __ Sltu(dst, lhs_low, TMP);
4415 }
4416 __ LoadConst32(TMP, imm_high);
4417 __ Sltu(AT, lhs_high, TMP);
4418 __ Sltu(TMP, TMP, lhs_high);
4419 if (dst != lhs_low) {
4420 __ LoadConst32(dst, imm_low);
4421 __ Sltu(dst, lhs_low, dst);
4422 }
4423 __ Slt(dst, TMP, dst);
4424 __ Or(dst, dst, AT);
4425 if (cond == kCondAE) {
4426 __ Xori(dst, dst, 1);
4427 }
4428 break;
4429 case kCondA:
4430 case kCondBE:
4431 if (dst == lhs_low) {
4432 __ LoadConst32(TMP, imm_low);
4433 __ Sltu(dst, TMP, lhs_low);
4434 }
4435 __ LoadConst32(TMP, imm_high);
4436 __ Sltu(AT, TMP, lhs_high);
4437 __ Sltu(TMP, lhs_high, TMP);
4438 if (dst != lhs_low) {
4439 __ LoadConst32(dst, imm_low);
4440 __ Sltu(dst, dst, lhs_low);
4441 }
4442 __ Slt(dst, TMP, dst);
4443 __ Or(dst, dst, AT);
4444 if (cond == kCondBE) {
4445 __ Xori(dst, dst, 1);
4446 }
4447 break;
4448 }
4449 } else {
4450 switch (cond) {
4451 case kCondEQ:
4452 __ Xor(TMP, lhs_high, rhs_high);
4453 __ Xor(AT, lhs_low, rhs_low);
4454 __ Or(dst, TMP, AT);
4455 __ Sltiu(dst, dst, 1);
4456 break;
4457 case kCondNE:
4458 __ Xor(TMP, lhs_high, rhs_high);
4459 __ Xor(AT, lhs_low, rhs_low);
4460 __ Or(dst, TMP, AT);
4461 __ Sltu(dst, ZERO, dst);
4462 break;
4463 case kCondLT:
4464 case kCondGE:
4465 __ Slt(TMP, rhs_high, lhs_high);
4466 __ Sltu(AT, lhs_low, rhs_low);
4467 __ Slt(TMP, TMP, AT);
4468 __ Slt(AT, lhs_high, rhs_high);
4469 __ Or(dst, AT, TMP);
4470 if (cond == kCondGE) {
4471 __ Xori(dst, dst, 1);
4472 }
4473 break;
4474 case kCondGT:
4475 case kCondLE:
4476 __ Slt(TMP, lhs_high, rhs_high);
4477 __ Sltu(AT, rhs_low, lhs_low);
4478 __ Slt(TMP, TMP, AT);
4479 __ Slt(AT, rhs_high, lhs_high);
4480 __ Or(dst, AT, TMP);
4481 if (cond == kCondLE) {
4482 __ Xori(dst, dst, 1);
4483 }
4484 break;
4485 case kCondB:
4486 case kCondAE:
4487 __ Sltu(TMP, rhs_high, lhs_high);
4488 __ Sltu(AT, lhs_low, rhs_low);
4489 __ Slt(TMP, TMP, AT);
4490 __ Sltu(AT, lhs_high, rhs_high);
4491 __ Or(dst, AT, TMP);
4492 if (cond == kCondAE) {
4493 __ Xori(dst, dst, 1);
4494 }
4495 break;
4496 case kCondA:
4497 case kCondBE:
4498 __ Sltu(TMP, lhs_high, rhs_high);
4499 __ Sltu(AT, rhs_low, lhs_low);
4500 __ Slt(TMP, TMP, AT);
4501 __ Sltu(AT, rhs_high, lhs_high);
4502 __ Or(dst, AT, TMP);
4503 if (cond == kCondBE) {
4504 __ Xori(dst, dst, 1);
4505 }
4506 break;
4507 }
4508 }
4509}
4510
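// Branching flavor of the 64-bit compare: ordered relations branch as soon as
// the high words decide the outcome (e.g. blt on the high words for LT) and
// only fall back to the unsigned low-word compare when the high words are
// equal; equality ors the xor-ed halves and branches on zero/non-zero.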
4511void InstructionCodeGeneratorMIPS::GenerateLongCompareAndBranch(IfCondition cond,
4512 LocationSummary* locations,
4513 MipsLabel* label) {
4514 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
4515 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
4516 Location rhs_location = locations->InAt(1);
4517 Register rhs_high = ZERO;
4518 Register rhs_low = ZERO;
4519 int64_t imm = 0;
4520 uint32_t imm_high = 0;
4521 uint32_t imm_low = 0;
4522 bool use_imm = rhs_location.IsConstant();
4523 if (use_imm) {
4524 imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
4525 imm_high = High32Bits(imm);
4526 imm_low = Low32Bits(imm);
4527 } else {
4528 rhs_high = rhs_location.AsRegisterPairHigh<Register>();
4529 rhs_low = rhs_location.AsRegisterPairLow<Register>();
4530 }
4531
4532 if (use_imm && imm == 0) {
4533 switch (cond) {
4534 case kCondEQ:
4535 case kCondBE: // <= 0 if zero
4536 __ Or(TMP, lhs_high, lhs_low);
4537 __ Beqz(TMP, label);
4538 break;
4539 case kCondNE:
4540 case kCondA: // > 0 if non-zero
4541 __ Or(TMP, lhs_high, lhs_low);
4542 __ Bnez(TMP, label);
4543 break;
4544 case kCondLT:
4545 __ Bltz(lhs_high, label);
4546 break;
4547 case kCondGE:
4548 __ Bgez(lhs_high, label);
4549 break;
4550 case kCondLE:
4551 __ Or(TMP, lhs_high, lhs_low);
4552 __ Sra(AT, lhs_high, 31);
4553 __ Bgeu(AT, TMP, label);
4554 break;
4555 case kCondGT:
4556 __ Or(TMP, lhs_high, lhs_low);
4557 __ Sra(AT, lhs_high, 31);
4558 __ Bltu(AT, TMP, label);
4559 break;
4560 case kCondB: // always false
4561 break;
4562 case kCondAE: // always true
4563 __ B(label);
4564 break;
4565 }
4566 } else if (use_imm) {
4567 // TODO: more efficient comparison with constants without loading them into TMP/AT.
4568 switch (cond) {
4569 case kCondEQ:
4570 __ LoadConst32(TMP, imm_high);
4571 __ Xor(TMP, TMP, lhs_high);
4572 __ LoadConst32(AT, imm_low);
4573 __ Xor(AT, AT, lhs_low);
4574 __ Or(TMP, TMP, AT);
4575 __ Beqz(TMP, label);
4576 break;
4577 case kCondNE:
4578 __ LoadConst32(TMP, imm_high);
4579 __ Xor(TMP, TMP, lhs_high);
4580 __ LoadConst32(AT, imm_low);
4581 __ Xor(AT, AT, lhs_low);
4582 __ Or(TMP, TMP, AT);
4583 __ Bnez(TMP, label);
4584 break;
4585 case kCondLT:
4586 __ LoadConst32(TMP, imm_high);
4587 __ Blt(lhs_high, TMP, label);
4588 __ Slt(TMP, TMP, lhs_high);
4589 __ LoadConst32(AT, imm_low);
4590 __ Sltu(AT, lhs_low, AT);
4591 __ Blt(TMP, AT, label);
4592 break;
4593 case kCondGE:
4594 __ LoadConst32(TMP, imm_high);
4595 __ Blt(TMP, lhs_high, label);
4596 __ Slt(TMP, lhs_high, TMP);
4597 __ LoadConst32(AT, imm_low);
4598 __ Sltu(AT, lhs_low, AT);
4599 __ Or(TMP, TMP, AT);
4600 __ Beqz(TMP, label);
4601 break;
4602 case kCondLE:
4603 __ LoadConst32(TMP, imm_high);
4604 __ Blt(lhs_high, TMP, label);
4605 __ Slt(TMP, TMP, lhs_high);
4606 __ LoadConst32(AT, imm_low);
4607 __ Sltu(AT, AT, lhs_low);
4608 __ Or(TMP, TMP, AT);
4609 __ Beqz(TMP, label);
4610 break;
4611 case kCondGT:
4612 __ LoadConst32(TMP, imm_high);
4613 __ Blt(TMP, lhs_high, label);
4614 __ Slt(TMP, lhs_high, TMP);
4615 __ LoadConst32(AT, imm_low);
4616 __ Sltu(AT, AT, lhs_low);
4617 __ Blt(TMP, AT, label);
4618 break;
4619 case kCondB:
4620 __ LoadConst32(TMP, imm_high);
4621 __ Bltu(lhs_high, TMP, label);
4622 __ Sltu(TMP, TMP, lhs_high);
4623 __ LoadConst32(AT, imm_low);
4624 __ Sltu(AT, lhs_low, AT);
4625 __ Blt(TMP, AT, label);
4626 break;
4627 case kCondAE:
4628 __ LoadConst32(TMP, imm_high);
4629 __ Bltu(TMP, lhs_high, label);
4630 __ Sltu(TMP, lhs_high, TMP);
4631 __ LoadConst32(AT, imm_low);
4632 __ Sltu(AT, lhs_low, AT);
4633 __ Or(TMP, TMP, AT);
4634 __ Beqz(TMP, label);
4635 break;
4636 case kCondBE:
4637 __ LoadConst32(TMP, imm_high);
4638 __ Bltu(lhs_high, TMP, label);
4639 __ Sltu(TMP, TMP, lhs_high);
4640 __ LoadConst32(AT, imm_low);
4641 __ Sltu(AT, AT, lhs_low);
4642 __ Or(TMP, TMP, AT);
4643 __ Beqz(TMP, label);
4644 break;
4645 case kCondA:
4646 __ LoadConst32(TMP, imm_high);
4647 __ Bltu(TMP, lhs_high, label);
4648 __ Sltu(TMP, lhs_high, TMP);
4649 __ LoadConst32(AT, imm_low);
4650 __ Sltu(AT, AT, lhs_low);
4651 __ Blt(TMP, AT, label);
4652 break;
4653 }
4654 } else {
4655 switch (cond) {
4656 case kCondEQ:
4657 __ Xor(TMP, lhs_high, rhs_high);
4658 __ Xor(AT, lhs_low, rhs_low);
4659 __ Or(TMP, TMP, AT);
4660 __ Beqz(TMP, label);
4661 break;
4662 case kCondNE:
4663 __ Xor(TMP, lhs_high, rhs_high);
4664 __ Xor(AT, lhs_low, rhs_low);
4665 __ Or(TMP, TMP, AT);
4666 __ Bnez(TMP, label);
4667 break;
4668 case kCondLT:
4669 __ Blt(lhs_high, rhs_high, label);
4670 __ Slt(TMP, rhs_high, lhs_high);
4671 __ Sltu(AT, lhs_low, rhs_low);
4672 __ Blt(TMP, AT, label);
4673 break;
4674 case kCondGE:
4675 __ Blt(rhs_high, lhs_high, label);
4676 __ Slt(TMP, lhs_high, rhs_high);
4677 __ Sltu(AT, lhs_low, rhs_low);
4678 __ Or(TMP, TMP, AT);
4679 __ Beqz(TMP, label);
4680 break;
4681 case kCondLE:
4682 __ Blt(lhs_high, rhs_high, label);
4683 __ Slt(TMP, rhs_high, lhs_high);
4684 __ Sltu(AT, rhs_low, lhs_low);
4685 __ Or(TMP, TMP, AT);
4686 __ Beqz(TMP, label);
4687 break;
4688 case kCondGT:
4689 __ Blt(rhs_high, lhs_high, label);
4690 __ Slt(TMP, lhs_high, rhs_high);
4691 __ Sltu(AT, rhs_low, lhs_low);
4692 __ Blt(TMP, AT, label);
4693 break;
4694 case kCondB:
4695 __ Bltu(lhs_high, rhs_high, label);
4696 __ Sltu(TMP, rhs_high, lhs_high);
4697 __ Sltu(AT, lhs_low, rhs_low);
4698 __ Blt(TMP, AT, label);
4699 break;
4700 case kCondAE:
4701 __ Bltu(rhs_high, lhs_high, label);
4702 __ Sltu(TMP, lhs_high, rhs_high);
4703 __ Sltu(AT, lhs_low, rhs_low);
4704 __ Or(TMP, TMP, AT);
4705 __ Beqz(TMP, label);
4706 break;
4707 case kCondBE:
4708 __ Bltu(lhs_high, rhs_high, label);
4709 __ Sltu(TMP, rhs_high, lhs_high);
4710 __ Sltu(AT, rhs_low, lhs_low);
4711 __ Or(TMP, TMP, AT);
4712 __ Beqz(TMP, label);
4713 break;
4714 case kCondA:
4715 __ Bltu(rhs_high, lhs_high, label);
4716 __ Sltu(TMP, lhs_high, rhs_high);
4717 __ Sltu(AT, rhs_low, lhs_low);
4718 __ Blt(TMP, AT, label);
4719 break;
4720 }
4721 }
4722}
4723
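// Floating-point conditions carry a bias that decides how an unordered
// comparison (a NaN operand) resolves. With gt_bias the ordered compare is
// used for LT/LE and the unordered compare for the operand-swapped GT/GE, so a
// NaN behaves as if lhs were the greater value; without gt_bias the
// ordered/unordered choice is reversed. On R6 the result is produced by
// cmp.cond.fmt into FTMP and moved to the core register with mfc1; on R2 it is
// produced in FPU condition flag 0 by c.cond.fmt and selected with movf/movt.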
4724void InstructionCodeGeneratorMIPS::GenerateFpCompare(IfCondition cond,
4725 bool gt_bias,
4726 Primitive::Type type,
4727 LocationSummary* locations) {
4728 Register dst = locations->Out().AsRegister<Register>();
4729 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
4730 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
4731 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
4732 if (type == Primitive::kPrimFloat) {
4733 if (isR6) {
4734 switch (cond) {
4735 case kCondEQ:
4736 __ CmpEqS(FTMP, lhs, rhs);
4737 __ Mfc1(dst, FTMP);
4738 __ Andi(dst, dst, 1);
4739 break;
4740 case kCondNE:
4741 __ CmpEqS(FTMP, lhs, rhs);
4742 __ Mfc1(dst, FTMP);
4743 __ Addiu(dst, dst, 1);
4744 break;
4745 case kCondLT:
4746 if (gt_bias) {
4747 __ CmpLtS(FTMP, lhs, rhs);
4748 } else {
4749 __ CmpUltS(FTMP, lhs, rhs);
4750 }
4751 __ Mfc1(dst, FTMP);
4752 __ Andi(dst, dst, 1);
4753 break;
4754 case kCondLE:
4755 if (gt_bias) {
4756 __ CmpLeS(FTMP, lhs, rhs);
4757 } else {
4758 __ CmpUleS(FTMP, lhs, rhs);
4759 }
4760 __ Mfc1(dst, FTMP);
4761 __ Andi(dst, dst, 1);
4762 break;
4763 case kCondGT:
4764 if (gt_bias) {
4765 __ CmpUltS(FTMP, rhs, lhs);
4766 } else {
4767 __ CmpLtS(FTMP, rhs, lhs);
4768 }
4769 __ Mfc1(dst, FTMP);
4770 __ Andi(dst, dst, 1);
4771 break;
4772 case kCondGE:
4773 if (gt_bias) {
4774 __ CmpUleS(FTMP, rhs, lhs);
4775 } else {
4776 __ CmpLeS(FTMP, rhs, lhs);
4777 }
4778 __ Mfc1(dst, FTMP);
4779 __ Andi(dst, dst, 1);
4780 break;
4781 default:
4782 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4783 UNREACHABLE();
4784 }
4785 } else {
4786 switch (cond) {
4787 case kCondEQ:
4788 __ CeqS(0, lhs, rhs);
4789 __ LoadConst32(dst, 1);
4790 __ Movf(dst, ZERO, 0);
4791 break;
4792 case kCondNE:
4793 __ CeqS(0, lhs, rhs);
4794 __ LoadConst32(dst, 1);
4795 __ Movt(dst, ZERO, 0);
4796 break;
4797 case kCondLT:
4798 if (gt_bias) {
4799 __ ColtS(0, lhs, rhs);
4800 } else {
4801 __ CultS(0, lhs, rhs);
4802 }
4803 __ LoadConst32(dst, 1);
4804 __ Movf(dst, ZERO, 0);
4805 break;
4806 case kCondLE:
4807 if (gt_bias) {
4808 __ ColeS(0, lhs, rhs);
4809 } else {
4810 __ CuleS(0, lhs, rhs);
4811 }
4812 __ LoadConst32(dst, 1);
4813 __ Movf(dst, ZERO, 0);
4814 break;
4815 case kCondGT:
4816 if (gt_bias) {
4817 __ CultS(0, rhs, lhs);
4818 } else {
4819 __ ColtS(0, rhs, lhs);
4820 }
4821 __ LoadConst32(dst, 1);
4822 __ Movf(dst, ZERO, 0);
4823 break;
4824 case kCondGE:
4825 if (gt_bias) {
4826 __ CuleS(0, rhs, lhs);
4827 } else {
4828 __ ColeS(0, rhs, lhs);
4829 }
4830 __ LoadConst32(dst, 1);
4831 __ Movf(dst, ZERO, 0);
4832 break;
4833 default:
4834 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4835 UNREACHABLE();
4836 }
4837 }
4838 } else {
4839 DCHECK_EQ(type, Primitive::kPrimDouble);
4840 if (isR6) {
4841 switch (cond) {
4842 case kCondEQ:
4843 __ CmpEqD(FTMP, lhs, rhs);
4844 __ Mfc1(dst, FTMP);
4845 __ Andi(dst, dst, 1);
4846 break;
4847 case kCondNE:
4848 __ CmpEqD(FTMP, lhs, rhs);
4849 __ Mfc1(dst, FTMP);
4850 __ Addiu(dst, dst, 1);
4851 break;
4852 case kCondLT:
4853 if (gt_bias) {
4854 __ CmpLtD(FTMP, lhs, rhs);
4855 } else {
4856 __ CmpUltD(FTMP, lhs, rhs);
4857 }
4858 __ Mfc1(dst, FTMP);
4859 __ Andi(dst, dst, 1);
4860 break;
4861 case kCondLE:
4862 if (gt_bias) {
4863 __ CmpLeD(FTMP, lhs, rhs);
4864 } else {
4865 __ CmpUleD(FTMP, lhs, rhs);
4866 }
4867 __ Mfc1(dst, FTMP);
4868 __ Andi(dst, dst, 1);
4869 break;
4870 case kCondGT:
4871 if (gt_bias) {
4872 __ CmpUltD(FTMP, rhs, lhs);
4873 } else {
4874 __ CmpLtD(FTMP, rhs, lhs);
4875 }
4876 __ Mfc1(dst, FTMP);
4877 __ Andi(dst, dst, 1);
4878 break;
4879 case kCondGE:
4880 if (gt_bias) {
4881 __ CmpUleD(FTMP, rhs, lhs);
4882 } else {
4883 __ CmpLeD(FTMP, rhs, lhs);
4884 }
4885 __ Mfc1(dst, FTMP);
4886 __ Andi(dst, dst, 1);
4887 break;
4888 default:
4889 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4890 UNREACHABLE();
4891 }
4892 } else {
4893 switch (cond) {
4894 case kCondEQ:
4895 __ CeqD(0, lhs, rhs);
4896 __ LoadConst32(dst, 1);
4897 __ Movf(dst, ZERO, 0);
4898 break;
4899 case kCondNE:
4900 __ CeqD(0, lhs, rhs);
4901 __ LoadConst32(dst, 1);
4902 __ Movt(dst, ZERO, 0);
4903 break;
4904 case kCondLT:
4905 if (gt_bias) {
4906 __ ColtD(0, lhs, rhs);
4907 } else {
4908 __ CultD(0, lhs, rhs);
4909 }
4910 __ LoadConst32(dst, 1);
4911 __ Movf(dst, ZERO, 0);
4912 break;
4913 case kCondLE:
4914 if (gt_bias) {
4915 __ ColeD(0, lhs, rhs);
4916 } else {
4917 __ CuleD(0, lhs, rhs);
4918 }
4919 __ LoadConst32(dst, 1);
4920 __ Movf(dst, ZERO, 0);
4921 break;
4922 case kCondGT:
4923 if (gt_bias) {
4924 __ CultD(0, rhs, lhs);
4925 } else {
4926 __ ColtD(0, rhs, lhs);
4927 }
4928 __ LoadConst32(dst, 1);
4929 __ Movf(dst, ZERO, 0);
4930 break;
4931 case kCondGE:
4932 if (gt_bias) {
4933 __ CuleD(0, rhs, lhs);
4934 } else {
4935 __ ColeD(0, rhs, lhs);
4936 }
4937 __ LoadConst32(dst, 1);
4938 __ Movf(dst, ZERO, 0);
4939 break;
4940 default:
4941 LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
4942 UNREACHABLE();
4943 }
4944 }
4945 }
4946}
4947
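// MaterializeFpCompareR2/R6 emit only the comparison and leave the branch or
// conditional move to the caller: the R2 variant sets FPU condition flag cc
// (readable with bc1t/bc1f or movt/movf), the R6 variant writes an all-ones or
// all-zero mask into an FPU register. As with MaterializeIntCompare, a true
// return value means the emitted test computes the negated condition (only
// kCondNE, which reuses the equality compare), so the caller must invert its
// branch or select.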
4948bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR2(IfCondition cond,
4949 bool gt_bias,
4950 Primitive::Type type,
4951 LocationSummary* input_locations,
4952 int cc) {
4953 FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
4954 FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
4955 CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
4956 if (type == Primitive::kPrimFloat) {
4957 switch (cond) {
4958 case kCondEQ:
4959 __ CeqS(cc, lhs, rhs);
4960 return false;
4961 case kCondNE:
4962 __ CeqS(cc, lhs, rhs);
4963 return true;
4964 case kCondLT:
4965 if (gt_bias) {
4966 __ ColtS(cc, lhs, rhs);
4967 } else {
4968 __ CultS(cc, lhs, rhs);
4969 }
4970 return false;
4971 case kCondLE:
4972 if (gt_bias) {
4973 __ ColeS(cc, lhs, rhs);
4974 } else {
4975 __ CuleS(cc, lhs, rhs);
4976 }
4977 return false;
4978 case kCondGT:
4979 if (gt_bias) {
4980 __ CultS(cc, rhs, lhs);
4981 } else {
4982 __ ColtS(cc, rhs, lhs);
4983 }
4984 return false;
4985 case kCondGE:
4986 if (gt_bias) {
4987 __ CuleS(cc, rhs, lhs);
4988 } else {
4989 __ ColeS(cc, rhs, lhs);
4990 }
4991 return false;
4992 default:
4993 LOG(FATAL) << "Unexpected non-floating-point condition";
4994 UNREACHABLE();
4995 }
4996 } else {
4997 DCHECK_EQ(type, Primitive::kPrimDouble);
4998 switch (cond) {
4999 case kCondEQ:
5000 __ CeqD(cc, lhs, rhs);
5001 return false;
5002 case kCondNE:
5003 __ CeqD(cc, lhs, rhs);
5004 return true;
5005 case kCondLT:
5006 if (gt_bias) {
5007 __ ColtD(cc, lhs, rhs);
5008 } else {
5009 __ CultD(cc, lhs, rhs);
5010 }
5011 return false;
5012 case kCondLE:
5013 if (gt_bias) {
5014 __ ColeD(cc, lhs, rhs);
5015 } else {
5016 __ CuleD(cc, lhs, rhs);
5017 }
5018 return false;
5019 case kCondGT:
5020 if (gt_bias) {
5021 __ CultD(cc, rhs, lhs);
5022 } else {
5023 __ ColtD(cc, rhs, lhs);
5024 }
5025 return false;
5026 case kCondGE:
5027 if (gt_bias) {
5028 __ CuleD(cc, rhs, lhs);
5029 } else {
5030 __ ColeD(cc, rhs, lhs);
5031 }
5032 return false;
5033 default:
5034 LOG(FATAL) << "Unexpected non-floating-point condition";
5035 UNREACHABLE();
5036 }
5037 }
5038}
5039
5040bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR6(IfCondition cond,
5041 bool gt_bias,
5042 Primitive::Type type,
5043 LocationSummary* input_locations,
5044 FRegister dst) {
5045 FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
5046 FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
5047 CHECK(codegen_->GetInstructionSetFeatures().IsR6());
5048 if (type == Primitive::kPrimFloat) {
5049 switch (cond) {
5050 case kCondEQ:
5051 __ CmpEqS(dst, lhs, rhs);
5052 return false;
5053 case kCondNE:
5054 __ CmpEqS(dst, lhs, rhs);
5055 return true;
5056 case kCondLT:
5057 if (gt_bias) {
5058 __ CmpLtS(dst, lhs, rhs);
5059 } else {
5060 __ CmpUltS(dst, lhs, rhs);
5061 }
5062 return false;
5063 case kCondLE:
5064 if (gt_bias) {
5065 __ CmpLeS(dst, lhs, rhs);
5066 } else {
5067 __ CmpUleS(dst, lhs, rhs);
5068 }
5069 return false;
5070 case kCondGT:
5071 if (gt_bias) {
5072 __ CmpUltS(dst, rhs, lhs);
5073 } else {
5074 __ CmpLtS(dst, rhs, lhs);
5075 }
5076 return false;
5077 case kCondGE:
5078 if (gt_bias) {
5079 __ CmpUleS(dst, rhs, lhs);
5080 } else {
5081 __ CmpLeS(dst, rhs, lhs);
5082 }
5083 return false;
5084 default:
5085 LOG(FATAL) << "Unexpected non-floating-point condition";
5086 UNREACHABLE();
5087 }
5088 } else {
5089 DCHECK_EQ(type, Primitive::kPrimDouble);
5090 switch (cond) {
5091 case kCondEQ:
5092 __ CmpEqD(dst, lhs, rhs);
5093 return false;
5094 case kCondNE:
5095 __ CmpEqD(dst, lhs, rhs);
5096 return true;
5097 case kCondLT:
5098 if (gt_bias) {
5099 __ CmpLtD(dst, lhs, rhs);
5100 } else {
5101 __ CmpUltD(dst, lhs, rhs);
5102 }
5103 return false;
5104 case kCondLE:
5105 if (gt_bias) {
5106 __ CmpLeD(dst, lhs, rhs);
5107 } else {
5108 __ CmpUleD(dst, lhs, rhs);
5109 }
5110 return false;
5111 case kCondGT:
5112 if (gt_bias) {
5113 __ CmpUltD(dst, rhs, lhs);
5114 } else {
5115 __ CmpLtD(dst, rhs, lhs);
5116 }
5117 return false;
5118 case kCondGE:
5119 if (gt_bias) {
5120 __ CmpUleD(dst, rhs, lhs);
5121 } else {
5122 __ CmpLeD(dst, rhs, lhs);
5123 }
5124 return false;
5125 default:
5126 LOG(FATAL) << "Unexpected non-floating-point condition";
5127 UNREACHABLE();
5128 }
5129 }
5130}
5131
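// Fused floating-point compare-and-branch. NaN handling mirrors
// GenerateFpCompare above; the only difference is how the result is consumed:
// R6 tests the mask register with bc1nez/bc1eqz, R2 tests FPU condition flag 0
// with bc1t/bc1f.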
5132void InstructionCodeGeneratorMIPS::GenerateFpCompareAndBranch(IfCondition cond,
5133 bool gt_bias,
5134 Primitive::Type type,
5135 LocationSummary* locations,
5136 MipsLabel* label) {
5137 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
5138 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
5139 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
5140 if (type == Primitive::kPrimFloat) {
5141 if (isR6) {
5142 switch (cond) {
5143 case kCondEQ:
5144 __ CmpEqS(FTMP, lhs, rhs);
5145 __ Bc1nez(FTMP, label);
5146 break;
5147 case kCondNE:
5148 __ CmpEqS(FTMP, lhs, rhs);
5149 __ Bc1eqz(FTMP, label);
5150 break;
5151 case kCondLT:
5152 if (gt_bias) {
5153 __ CmpLtS(FTMP, lhs, rhs);
5154 } else {
5155 __ CmpUltS(FTMP, lhs, rhs);
5156 }
5157 __ Bc1nez(FTMP, label);
5158 break;
5159 case kCondLE:
5160 if (gt_bias) {
5161 __ CmpLeS(FTMP, lhs, rhs);
5162 } else {
5163 __ CmpUleS(FTMP, lhs, rhs);
5164 }
5165 __ Bc1nez(FTMP, label);
5166 break;
5167 case kCondGT:
5168 if (gt_bias) {
5169 __ CmpUltS(FTMP, rhs, lhs);
5170 } else {
5171 __ CmpLtS(FTMP, rhs, lhs);
5172 }
5173 __ Bc1nez(FTMP, label);
5174 break;
5175 case kCondGE:
5176 if (gt_bias) {
5177 __ CmpUleS(FTMP, rhs, lhs);
5178 } else {
5179 __ CmpLeS(FTMP, rhs, lhs);
5180 }
5181 __ Bc1nez(FTMP, label);
5182 break;
5183 default:
5184 LOG(FATAL) << "Unexpected non-floating-point condition";
5185 UNREACHABLE();
5186 }
5187 } else {
5188 switch (cond) {
5189 case kCondEQ:
5190 __ CeqS(0, lhs, rhs);
5191 __ Bc1t(0, label);
5192 break;
5193 case kCondNE:
5194 __ CeqS(0, lhs, rhs);
5195 __ Bc1f(0, label);
5196 break;
5197 case kCondLT:
5198 if (gt_bias) {
5199 __ ColtS(0, lhs, rhs);
5200 } else {
5201 __ CultS(0, lhs, rhs);
5202 }
5203 __ Bc1t(0, label);
5204 break;
5205 case kCondLE:
5206 if (gt_bias) {
5207 __ ColeS(0, lhs, rhs);
5208 } else {
5209 __ CuleS(0, lhs, rhs);
5210 }
5211 __ Bc1t(0, label);
5212 break;
5213 case kCondGT:
5214 if (gt_bias) {
5215 __ CultS(0, rhs, lhs);
5216 } else {
5217 __ ColtS(0, rhs, lhs);
5218 }
5219 __ Bc1t(0, label);
5220 break;
5221 case kCondGE:
5222 if (gt_bias) {
5223 __ CuleS(0, rhs, lhs);
5224 } else {
5225 __ ColeS(0, rhs, lhs);
5226 }
5227 __ Bc1t(0, label);
5228 break;
5229 default:
5230 LOG(FATAL) << "Unexpected non-floating-point condition";
5231 UNREACHABLE();
5232 }
5233 }
5234 } else {
5235 DCHECK_EQ(type, Primitive::kPrimDouble);
5236 if (isR6) {
5237 switch (cond) {
5238 case kCondEQ:
5239 __ CmpEqD(FTMP, lhs, rhs);
5240 __ Bc1nez(FTMP, label);
5241 break;
5242 case kCondNE:
5243 __ CmpEqD(FTMP, lhs, rhs);
5244 __ Bc1eqz(FTMP, label);
5245 break;
5246 case kCondLT:
5247 if (gt_bias) {
5248 __ CmpLtD(FTMP, lhs, rhs);
5249 } else {
5250 __ CmpUltD(FTMP, lhs, rhs);
5251 }
5252 __ Bc1nez(FTMP, label);
5253 break;
5254 case kCondLE:
5255 if (gt_bias) {
5256 __ CmpLeD(FTMP, lhs, rhs);
5257 } else {
5258 __ CmpUleD(FTMP, lhs, rhs);
5259 }
5260 __ Bc1nez(FTMP, label);
5261 break;
5262 case kCondGT:
5263 if (gt_bias) {
5264 __ CmpUltD(FTMP, rhs, lhs);
5265 } else {
5266 __ CmpLtD(FTMP, rhs, lhs);
5267 }
5268 __ Bc1nez(FTMP, label);
5269 break;
5270 case kCondGE:
5271 if (gt_bias) {
5272 __ CmpUleD(FTMP, rhs, lhs);
5273 } else {
5274 __ CmpLeD(FTMP, rhs, lhs);
5275 }
5276 __ Bc1nez(FTMP, label);
5277 break;
5278 default:
5279 LOG(FATAL) << "Unexpected non-floating-point condition";
5280 UNREACHABLE();
5281 }
5282 } else {
5283 switch (cond) {
5284 case kCondEQ:
5285 __ CeqD(0, lhs, rhs);
5286 __ Bc1t(0, label);
5287 break;
5288 case kCondNE:
5289 __ CeqD(0, lhs, rhs);
5290 __ Bc1f(0, label);
5291 break;
5292 case kCondLT:
5293 if (gt_bias) {
5294 __ ColtD(0, lhs, rhs);
5295 } else {
5296 __ CultD(0, lhs, rhs);
5297 }
5298 __ Bc1t(0, label);
5299 break;
5300 case kCondLE:
5301 if (gt_bias) {
5302 __ ColeD(0, lhs, rhs);
5303 } else {
5304 __ CuleD(0, lhs, rhs);
5305 }
5306 __ Bc1t(0, label);
5307 break;
5308 case kCondGT:
5309 if (gt_bias) {
5310 __ CultD(0, rhs, lhs);
5311 } else {
5312 __ ColtD(0, rhs, lhs);
5313 }
5314 __ Bc1t(0, label);
5315 break;
5316 case kCondGE:
5317 if (gt_bias) {
5318 __ CuleD(0, rhs, lhs);
5319 } else {
5320 __ ColeD(0, rhs, lhs);
5321 }
5322 __ Bc1t(0, label);
5323 break;
5324 default:
5325 LOG(FATAL) << "Unexpected non-floating-point condition";
5326 UNREACHABLE();
5327 }
5328 }
5329 }
5330}
5331
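// GenerateTestAndBranch drives HIf and HDeoptimize. Constant conditions
// collapse to an unconditional branch (or to nothing), already materialized
// conditions are tested with a single beqz/bnez on their 0/1 value, and a
// non-materialized HCondition is folded directly into one of the fused
// compare-and-branch helpers above, using the opposite condition when only the
// false target needs an explicit branch.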
5332void InstructionCodeGeneratorMIPS::GenerateTestAndBranch(HInstruction* instruction,
5333 size_t condition_input_index,
5334 MipsLabel* true_target,
5335 MipsLabel* false_target) {
5336 HInstruction* cond = instruction->InputAt(condition_input_index);
5337
5338 if (true_target == nullptr && false_target == nullptr) {
5339 // Nothing to do. The code always falls through.
5340 return;
5341 } else if (cond->IsIntConstant()) {
5342 // Constant condition, statically compared against "true" (integer value 1).
5343 if (cond->AsIntConstant()->IsTrue()) {
5344 if (true_target != nullptr) {
5345 __ B(true_target);
5346 }
5347 } else {
5348 DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
5349 if (false_target != nullptr) {
5350 __ B(false_target);
5351 }
5352 }
5353 return;
5354 }
5355
5356 // The following code generates these patterns:
5357 // (1) true_target == nullptr && false_target != nullptr
5358 // - opposite condition true => branch to false_target
5359 // (2) true_target != nullptr && false_target == nullptr
5360 // - condition true => branch to true_target
5361 // (3) true_target != nullptr && false_target != nullptr
5362 // - condition true => branch to true_target
5363 // - branch to false_target
5364 if (IsBooleanValueOrMaterializedCondition(cond)) {
5365 // The condition instruction has been materialized, compare the output to 0.
5366 Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
5367 DCHECK(cond_val.IsRegister());
5368 if (true_target == nullptr) {
5369 __ Beqz(cond_val.AsRegister<Register>(), false_target);
5370 } else {
5371 __ Bnez(cond_val.AsRegister<Register>(), true_target);
5372 }
5373 } else {
5374 // The condition instruction has not been materialized, use its inputs as
5375 // the comparison and its condition as the branch condition.
5376 HCondition* condition = cond->AsCondition();
5377 Primitive::Type type = condition->InputAt(0)->GetType();
5378 LocationSummary* locations = cond->GetLocations();
5379 IfCondition if_cond = condition->GetCondition();
5380 MipsLabel* branch_target = true_target;
5381
5382 if (true_target == nullptr) {
5383 if_cond = condition->GetOppositeCondition();
5384 branch_target = false_target;
5385 }
5386
5387 switch (type) {
5388 default:
5389 GenerateIntCompareAndBranch(if_cond, locations, branch_target);
5390 break;
5391 case Primitive::kPrimLong:
5392 GenerateLongCompareAndBranch(if_cond, locations, branch_target);
5393 break;
5394 case Primitive::kPrimFloat:
5395 case Primitive::kPrimDouble:
5396 GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
5397 break;
5398 }
5399 }
5400
5401 // If neither branch falls through (case 3), the conditional branch to `true_target`
5402 // was already emitted (case 2) and we need to emit a jump to `false_target`.
5403 if (true_target != nullptr && false_target != nullptr) {
5404 __ B(false_target);
5405 }
5406}
5407
5408void LocationsBuilderMIPS::VisitIf(HIf* if_instr) {
5409 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
5410 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
5411 locations->SetInAt(0, Location::RequiresRegister());
5412 }
5413}
5414
5415void InstructionCodeGeneratorMIPS::VisitIf(HIf* if_instr) {
5416 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
5417 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
5418 MipsLabel* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
5419 nullptr : codegen_->GetLabelOf(true_successor);
5420 MipsLabel* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
5421 nullptr : codegen_->GetLabelOf(false_successor);
5422 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
5423}
5424
5425void LocationsBuilderMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
5426 LocationSummary* locations = new (GetGraph()->GetArena())
5427 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
5428 InvokeRuntimeCallingConvention calling_convention;
5429 RegisterSet caller_saves = RegisterSet::Empty();
5430 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5431 locations->SetCustomSlowPathCallerSaves(caller_saves);
5432 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
5433 locations->SetInAt(0, Location::RequiresRegister());
5434 }
5435}
5436
5437void InstructionCodeGeneratorMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08005438 SlowPathCodeMIPS* slow_path =
5439 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00005440 GenerateTestAndBranch(deoptimize,
5441 /* condition_input_index */ 0,
5442 slow_path->GetEntryLabel(),
5443 /* false_target */ nullptr);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005444}
5445
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005446// This function returns true if a conditional move can be generated for HSelect.
5447 // Otherwise it returns false and HSelect must be implemented in terms of conditional
5448// branches and regular moves.
5449//
5450// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
5451//
5452// While determining feasibility of a conditional move and setting inputs/outputs
5453// are two distinct tasks, this function does both because they share quite a bit
5454// of common logic.
5455static bool CanMoveConditionally(HSelect* select, bool is_r6, LocationSummary* locations_to_set) {
5456 bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
5457 HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
5458 HCondition* condition = cond->AsCondition();
5459
5460 Primitive::Type cond_type = materialized ? Primitive::kPrimInt : condition->InputAt(0)->GetType();
5461 Primitive::Type dst_type = select->GetType();
5462
5463 HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
5464 HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
5465 bool is_true_value_zero_constant =
5466 (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
5467 bool is_false_value_zero_constant =
5468 (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());
5469
5470 bool can_move_conditionally = false;
5471 bool use_const_for_false_in = false;
5472 bool use_const_for_true_in = false;
5473
5474 if (!cond->IsConstant()) {
5475 switch (cond_type) {
5476 default:
5477 switch (dst_type) {
5478 default:
5479 // Moving int on int condition.
5480 if (is_r6) {
5481 if (is_true_value_zero_constant) {
5482 // seleqz out_reg, false_reg, cond_reg
5483 can_move_conditionally = true;
5484 use_const_for_true_in = true;
5485 } else if (is_false_value_zero_constant) {
5486 // selnez out_reg, true_reg, cond_reg
5487 can_move_conditionally = true;
5488 use_const_for_false_in = true;
5489 } else if (materialized) {
5490 // We do not materialize unmaterialized int conditions here,
5491 // to keep the instruction count low.
5492 // selnez AT, true_reg, cond_reg
5493 // seleqz TMP, false_reg, cond_reg
5494 // or out_reg, AT, TMP
5495 can_move_conditionally = true;
5496 }
5497 } else {
5498 // movn out_reg, true_reg/ZERO, cond_reg
5499 can_move_conditionally = true;
5500 use_const_for_true_in = is_true_value_zero_constant;
5501 }
5502 break;
5503 case Primitive::kPrimLong:
5504 // Moving long on int condition.
5505 if (is_r6) {
5506 if (is_true_value_zero_constant) {
5507 // seleqz out_reg_lo, false_reg_lo, cond_reg
5508 // seleqz out_reg_hi, false_reg_hi, cond_reg
5509 can_move_conditionally = true;
5510 use_const_for_true_in = true;
5511 } else if (is_false_value_zero_constant) {
5512 // selnez out_reg_lo, true_reg_lo, cond_reg
5513 // selnez out_reg_hi, true_reg_hi, cond_reg
5514 can_move_conditionally = true;
5515 use_const_for_false_in = true;
5516 }
5517 // Other long conditional moves would generate 6+ instructions,
5518 // which is too many.
5519 } else {
5520 // movn out_reg_lo, true_reg_lo/ZERO, cond_reg
5521 // movn out_reg_hi, true_reg_hi/ZERO, cond_reg
5522 can_move_conditionally = true;
5523 use_const_for_true_in = is_true_value_zero_constant;
5524 }
5525 break;
5526 case Primitive::kPrimFloat:
5527 case Primitive::kPrimDouble:
5528 // Moving float/double on int condition.
5529 if (is_r6) {
5530 if (materialized) {
5531 // We do not materialize unmaterialized int conditions here,
5532 // to keep the instruction count low.
5533 can_move_conditionally = true;
5534 if (is_true_value_zero_constant) {
5535 // sltu TMP, ZERO, cond_reg
5536 // mtc1 TMP, temp_cond_reg
5537 // seleqz.fmt out_reg, false_reg, temp_cond_reg
5538 use_const_for_true_in = true;
5539 } else if (is_false_value_zero_constant) {
5540 // sltu TMP, ZERO, cond_reg
5541 // mtc1 TMP, temp_cond_reg
5542 // selnez.fmt out_reg, true_reg, temp_cond_reg
5543 use_const_for_false_in = true;
5544 } else {
5545 // sltu TMP, ZERO, cond_reg
5546 // mtc1 TMP, temp_cond_reg
5547 // sel.fmt temp_cond_reg, false_reg, true_reg
5548 // mov.fmt out_reg, temp_cond_reg
5549 }
5550 }
5551 } else {
5552 // movn.fmt out_reg, true_reg, cond_reg
5553 can_move_conditionally = true;
5554 }
5555 break;
5556 }
5557 break;
5558 case Primitive::kPrimLong:
5559 // We do not materialize long comparisons for now;
5560 // conditional branches are used instead.
5561 break;
5562 case Primitive::kPrimFloat:
5563 case Primitive::kPrimDouble:
5564 switch (dst_type) {
5565 default:
5566 // Moving int on float/double condition.
5567 if (is_r6) {
5568 if (is_true_value_zero_constant) {
5569 // mfc1 TMP, temp_cond_reg
5570 // seleqz out_reg, false_reg, TMP
5571 can_move_conditionally = true;
5572 use_const_for_true_in = true;
5573 } else if (is_false_value_zero_constant) {
5574 // mfc1 TMP, temp_cond_reg
5575 // selnez out_reg, true_reg, TMP
5576 can_move_conditionally = true;
5577 use_const_for_false_in = true;
5578 } else {
5579 // mfc1 TMP, temp_cond_reg
5580 // selnez AT, true_reg, TMP
5581 // seleqz TMP, false_reg, TMP
5582 // or out_reg, AT, TMP
5583 can_move_conditionally = true;
5584 }
5585 } else {
5586 // movt out_reg, true_reg/ZERO, cc
5587 can_move_conditionally = true;
5588 use_const_for_true_in = is_true_value_zero_constant;
5589 }
5590 break;
5591 case Primitive::kPrimLong:
5592 // Moving long on float/double condition.
5593 if (is_r6) {
5594 if (is_true_value_zero_constant) {
5595 // mfc1 TMP, temp_cond_reg
5596 // seleqz out_reg_lo, false_reg_lo, TMP
5597 // seleqz out_reg_hi, false_reg_hi, TMP
5598 can_move_conditionally = true;
5599 use_const_for_true_in = true;
5600 } else if (is_false_value_zero_constant) {
5601 // mfc1 TMP, temp_cond_reg
5602 // selnez out_reg_lo, true_reg_lo, TMP
5603 // selnez out_reg_hi, true_reg_hi, TMP
5604 can_move_conditionally = true;
5605 use_const_for_false_in = true;
5606 }
5607 // Other long conditional moves would generate 6+ instructions,
5608 // which is too many.
5609 } else {
5610 // movt out_reg_lo, true_reg_lo/ZERO, cc
5611 // movt out_reg_hi, true_reg_hi/ZERO, cc
5612 can_move_conditionally = true;
5613 use_const_for_true_in = is_true_value_zero_constant;
5614 }
5615 break;
5616 case Primitive::kPrimFloat:
5617 case Primitive::kPrimDouble:
5618 // Moving float/double on float/double condition.
5619 if (is_r6) {
5620 can_move_conditionally = true;
5621 if (is_true_value_zero_constant) {
5622 // seleqz.fmt out_reg, false_reg, temp_cond_reg
5623 use_const_for_true_in = true;
5624 } else if (is_false_value_zero_constant) {
5625 // selnez.fmt out_reg, true_reg, temp_cond_reg
5626 use_const_for_false_in = true;
5627 } else {
5628 // sel.fmt temp_cond_reg, false_reg, true_reg
5629 // mov.fmt out_reg, temp_cond_reg
5630 }
5631 } else {
5632 // movt.fmt out_reg, true_reg, cc
5633 can_move_conditionally = true;
5634 }
5635 break;
5636 }
5637 break;
5638 }
5639 }
5640
5641 if (can_move_conditionally) {
5642 DCHECK(!use_const_for_false_in || !use_const_for_true_in);
5643 } else {
5644 DCHECK(!use_const_for_false_in);
5645 DCHECK(!use_const_for_true_in);
5646 }
5647
5648 if (locations_to_set != nullptr) {
5649 if (use_const_for_false_in) {
5650 locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
5651 } else {
5652 locations_to_set->SetInAt(0,
5653 Primitive::IsFloatingPointType(dst_type)
5654 ? Location::RequiresFpuRegister()
5655 : Location::RequiresRegister());
5656 }
5657 if (use_const_for_true_in) {
5658 locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
5659 } else {
5660 locations_to_set->SetInAt(1,
5661 Primitive::IsFloatingPointType(dst_type)
5662 ? Location::RequiresFpuRegister()
5663 : Location::RequiresRegister());
5664 }
5665 if (materialized) {
5666 locations_to_set->SetInAt(2, Location::RequiresRegister());
5667 }
5668 // On R6 we don't require the output to be the same as the
5669 // first input for conditional moves unlike on R2.
5670 bool is_out_same_as_first_in = !can_move_conditionally || !is_r6;
5671 if (is_out_same_as_first_in) {
5672 locations_to_set->SetOut(Location::SameAsFirstInput());
5673 } else {
5674 locations_to_set->SetOut(Primitive::IsFloatingPointType(dst_type)
5675 ? Location::RequiresFpuRegister()
5676 : Location::RequiresRegister());
5677 }
5678 }
5679
5680 return can_move_conditionally;
5681}
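// A few examples of what the rules above allow, based on the instruction sketches in
// the comments:
// - int destination, materialized int condition, false value == 0, R6:
//     selnez out_reg, true_reg, cond_reg           // one instruction
// - int destination, materialized int condition, no zero constant, R2:
//     movn   out_reg, true_reg, cond_reg           // out_reg already holds the false value
// - long destination on R6 with neither value a zero constant is rejected, since the
//   selnez/seleqz/or pattern would have to be applied to both register halves
//   (6+ instructions).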
5682
5683void InstructionCodeGeneratorMIPS::GenConditionalMoveR2(HSelect* select) {
5684 LocationSummary* locations = select->GetLocations();
5685 Location dst = locations->Out();
5686 Location src = locations->InAt(1);
5687 Register src_reg = ZERO;
5688 Register src_reg_high = ZERO;
5689 HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
5690 Register cond_reg = TMP;
5691 int cond_cc = 0;
5692 Primitive::Type cond_type = Primitive::kPrimInt;
5693 bool cond_inverted = false;
5694 Primitive::Type dst_type = select->GetType();
5695
5696 if (IsBooleanValueOrMaterializedCondition(cond)) {
5697 cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
5698 } else {
5699 HCondition* condition = cond->AsCondition();
5700 LocationSummary* cond_locations = cond->GetLocations();
5701 IfCondition if_cond = condition->GetCondition();
5702 cond_type = condition->InputAt(0)->GetType();
5703 switch (cond_type) {
5704 default:
5705 DCHECK_NE(cond_type, Primitive::kPrimLong);
5706 cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
5707 break;
5708 case Primitive::kPrimFloat:
5709 case Primitive::kPrimDouble:
5710 cond_inverted = MaterializeFpCompareR2(if_cond,
5711 condition->IsGtBias(),
5712 cond_type,
5713 cond_locations,
5714 cond_cc);
5715 break;
5716 }
5717 }
5718
5719 DCHECK(dst.Equals(locations->InAt(0)));
5720 if (src.IsRegister()) {
5721 src_reg = src.AsRegister<Register>();
5722 } else if (src.IsRegisterPair()) {
5723 src_reg = src.AsRegisterPairLow<Register>();
5724 src_reg_high = src.AsRegisterPairHigh<Register>();
5725 } else if (src.IsConstant()) {
5726 DCHECK(src.GetConstant()->IsZeroBitPattern());
5727 }
5728
5729 switch (cond_type) {
5730 default:
5731 switch (dst_type) {
5732 default:
5733 if (cond_inverted) {
5734 __ Movz(dst.AsRegister<Register>(), src_reg, cond_reg);
5735 } else {
5736 __ Movn(dst.AsRegister<Register>(), src_reg, cond_reg);
5737 }
5738 break;
5739 case Primitive::kPrimLong:
5740 if (cond_inverted) {
5741 __ Movz(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
5742 __ Movz(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
5743 } else {
5744 __ Movn(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
5745 __ Movn(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
5746 }
5747 break;
5748 case Primitive::kPrimFloat:
5749 if (cond_inverted) {
5750 __ MovzS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
5751 } else {
5752 __ MovnS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
5753 }
5754 break;
5755 case Primitive::kPrimDouble:
5756 if (cond_inverted) {
5757 __ MovzD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
5758 } else {
5759 __ MovnD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
5760 }
5761 break;
5762 }
5763 break;
5764 case Primitive::kPrimLong:
5765 LOG(FATAL) << "Unreachable";
5766 UNREACHABLE();
5767 case Primitive::kPrimFloat:
5768 case Primitive::kPrimDouble:
5769 switch (dst_type) {
5770 default:
5771 if (cond_inverted) {
5772 __ Movf(dst.AsRegister<Register>(), src_reg, cond_cc);
5773 } else {
5774 __ Movt(dst.AsRegister<Register>(), src_reg, cond_cc);
5775 }
5776 break;
5777 case Primitive::kPrimLong:
5778 if (cond_inverted) {
5779 __ Movf(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
5780 __ Movf(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
5781 } else {
5782 __ Movt(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
5783 __ Movt(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
5784 }
5785 break;
5786 case Primitive::kPrimFloat:
5787 if (cond_inverted) {
5788 __ MovfS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
5789 } else {
5790 __ MovtS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
5791 }
5792 break;
5793 case Primitive::kPrimDouble:
5794 if (cond_inverted) {
5795 __ MovfD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
5796 } else {
5797 __ MovtD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
5798 }
5799 break;
5800 }
5801 break;
5802 }
5803}
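// A short reminder of the R2 semantics used above: movn/movz copy `src` into `dst` only
// when the integer condition register is non-zero/zero, and movt/movf do the same based
// on an FP condition code. Since the output is constrained to SameAsFirstInput, the
// destination already holds the false value, so `out = cond ? a : b` reduces to roughly:
//     <materialize cond into TMP, unless it is already a boolean in a register>
//     movn out_reg, a_reg, TMP    // or movz when the materialized compare is inverted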
5804
5805void InstructionCodeGeneratorMIPS::GenConditionalMoveR6(HSelect* select) {
5806 LocationSummary* locations = select->GetLocations();
5807 Location dst = locations->Out();
5808 Location false_src = locations->InAt(0);
5809 Location true_src = locations->InAt(1);
5810 HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
5811 Register cond_reg = TMP;
5812 FRegister fcond_reg = FTMP;
5813 Primitive::Type cond_type = Primitive::kPrimInt;
5814 bool cond_inverted = false;
5815 Primitive::Type dst_type = select->GetType();
5816
5817 if (IsBooleanValueOrMaterializedCondition(cond)) {
5818 cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
5819 } else {
5820 HCondition* condition = cond->AsCondition();
5821 LocationSummary* cond_locations = cond->GetLocations();
5822 IfCondition if_cond = condition->GetCondition();
5823 cond_type = condition->InputAt(0)->GetType();
5824 switch (cond_type) {
5825 default:
5826 DCHECK_NE(cond_type, Primitive::kPrimLong);
5827 cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
5828 break;
5829 case Primitive::kPrimFloat:
5830 case Primitive::kPrimDouble:
5831 cond_inverted = MaterializeFpCompareR6(if_cond,
5832 condition->IsGtBias(),
5833 cond_type,
5834 cond_locations,
5835 fcond_reg);
5836 break;
5837 }
5838 }
5839
5840 if (true_src.IsConstant()) {
5841 DCHECK(true_src.GetConstant()->IsZeroBitPattern());
5842 }
5843 if (false_src.IsConstant()) {
5844 DCHECK(false_src.GetConstant()->IsZeroBitPattern());
5845 }
5846
5847 switch (dst_type) {
5848 default:
5849 if (Primitive::IsFloatingPointType(cond_type)) {
5850 __ Mfc1(cond_reg, fcond_reg);
5851 }
5852 if (true_src.IsConstant()) {
5853 if (cond_inverted) {
5854 __ Selnez(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
5855 } else {
5856 __ Seleqz(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
5857 }
5858 } else if (false_src.IsConstant()) {
5859 if (cond_inverted) {
5860 __ Seleqz(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
5861 } else {
5862 __ Selnez(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
5863 }
5864 } else {
5865 DCHECK_NE(cond_reg, AT);
5866 if (cond_inverted) {
5867 __ Seleqz(AT, true_src.AsRegister<Register>(), cond_reg);
5868 __ Selnez(TMP, false_src.AsRegister<Register>(), cond_reg);
5869 } else {
5870 __ Selnez(AT, true_src.AsRegister<Register>(), cond_reg);
5871 __ Seleqz(TMP, false_src.AsRegister<Register>(), cond_reg);
5872 }
5873 __ Or(dst.AsRegister<Register>(), AT, TMP);
5874 }
5875 break;
5876 case Primitive::kPrimLong: {
5877 if (Primitive::IsFloatingPointType(cond_type)) {
5878 __ Mfc1(cond_reg, fcond_reg);
5879 }
5880 Register dst_lo = dst.AsRegisterPairLow<Register>();
5881 Register dst_hi = dst.AsRegisterPairHigh<Register>();
5882 if (true_src.IsConstant()) {
5883 Register src_lo = false_src.AsRegisterPairLow<Register>();
5884 Register src_hi = false_src.AsRegisterPairHigh<Register>();
5885 if (cond_inverted) {
5886 __ Selnez(dst_lo, src_lo, cond_reg);
5887 __ Selnez(dst_hi, src_hi, cond_reg);
5888 } else {
5889 __ Seleqz(dst_lo, src_lo, cond_reg);
5890 __ Seleqz(dst_hi, src_hi, cond_reg);
5891 }
5892 } else {
5893 DCHECK(false_src.IsConstant());
5894 Register src_lo = true_src.AsRegisterPairLow<Register>();
5895 Register src_hi = true_src.AsRegisterPairHigh<Register>();
5896 if (cond_inverted) {
5897 __ Seleqz(dst_lo, src_lo, cond_reg);
5898 __ Seleqz(dst_hi, src_hi, cond_reg);
5899 } else {
5900 __ Selnez(dst_lo, src_lo, cond_reg);
5901 __ Selnez(dst_hi, src_hi, cond_reg);
5902 }
5903 }
5904 break;
5905 }
5906 case Primitive::kPrimFloat: {
5907 if (!Primitive::IsFloatingPointType(cond_type)) {
5908 // sel*.fmt tests bit 0 of the condition register, so account for that.
5909 __ Sltu(TMP, ZERO, cond_reg);
5910 __ Mtc1(TMP, fcond_reg);
5911 }
5912 FRegister dst_reg = dst.AsFpuRegister<FRegister>();
5913 if (true_src.IsConstant()) {
5914 FRegister src_reg = false_src.AsFpuRegister<FRegister>();
5915 if (cond_inverted) {
5916 __ SelnezS(dst_reg, src_reg, fcond_reg);
5917 } else {
5918 __ SeleqzS(dst_reg, src_reg, fcond_reg);
5919 }
5920 } else if (false_src.IsConstant()) {
5921 FRegister src_reg = true_src.AsFpuRegister<FRegister>();
5922 if (cond_inverted) {
5923 __ SeleqzS(dst_reg, src_reg, fcond_reg);
5924 } else {
5925 __ SelnezS(dst_reg, src_reg, fcond_reg);
5926 }
5927 } else {
5928 if (cond_inverted) {
5929 __ SelS(fcond_reg,
5930 true_src.AsFpuRegister<FRegister>(),
5931 false_src.AsFpuRegister<FRegister>());
5932 } else {
5933 __ SelS(fcond_reg,
5934 false_src.AsFpuRegister<FRegister>(),
5935 true_src.AsFpuRegister<FRegister>());
5936 }
5937 __ MovS(dst_reg, fcond_reg);
5938 }
5939 break;
5940 }
5941 case Primitive::kPrimDouble: {
5942 if (!Primitive::IsFloatingPointType(cond_type)) {
5943 // sel*.fmt tests bit 0 of the condition register, so account for that.
5944 __ Sltu(TMP, ZERO, cond_reg);
5945 __ Mtc1(TMP, fcond_reg);
5946 }
5947 FRegister dst_reg = dst.AsFpuRegister<FRegister>();
5948 if (true_src.IsConstant()) {
5949 FRegister src_reg = false_src.AsFpuRegister<FRegister>();
5950 if (cond_inverted) {
5951 __ SelnezD(dst_reg, src_reg, fcond_reg);
5952 } else {
5953 __ SeleqzD(dst_reg, src_reg, fcond_reg);
5954 }
5955 } else if (false_src.IsConstant()) {
5956 FRegister src_reg = true_src.AsFpuRegister<FRegister>();
5957 if (cond_inverted) {
5958 __ SeleqzD(dst_reg, src_reg, fcond_reg);
5959 } else {
5960 __ SelnezD(dst_reg, src_reg, fcond_reg);
5961 }
5962 } else {
5963 if (cond_inverted) {
5964 __ SelD(fcond_reg,
5965 true_src.AsFpuRegister<FRegister>(),
5966 false_src.AsFpuRegister<FRegister>());
5967 } else {
5968 __ SelD(fcond_reg,
5969 false_src.AsFpuRegister<FRegister>(),
5970 true_src.AsFpuRegister<FRegister>());
5971 }
5972 __ MovD(dst_reg, fcond_reg);
5973 }
5974 break;
5975 }
5976 }
5977}
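// A short reminder of the R6 semantics used above: seleqz/selnez write the source to the
// destination when the condition register is zero/non-zero and write 0 otherwise. That
// is why a select with two non-constant integer inputs needs three instructions:
//     selnez AT,  true_reg,  cond_reg
//     seleqz TMP, false_reg, cond_reg
//     or     out_reg, AT, TMP
// and why a zero constant on either side collapses to a single seleqz/selnez. The FP
// selects (sel.fmt/seleqz.fmt/selnez.fmt) test bit 0 of the FP condition register,
// hence the sltu/mtc1 fix-up when the condition was produced as an integer.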
5978
Goran Jakovljevicc6418422016-12-05 16:31:55 +01005979void LocationsBuilderMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
5980 LocationSummary* locations = new (GetGraph()->GetArena())
5981 LocationSummary(flag, LocationSummary::kNoCall);
5982 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07005983}
5984
Goran Jakovljevicc6418422016-12-05 16:31:55 +01005985void InstructionCodeGeneratorMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
5986 __ LoadFromOffset(kLoadWord,
5987 flag->GetLocations()->Out().AsRegister<Register>(),
5988 SP,
5989 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07005990}
5991
David Brazdil74eb1b22015-12-14 11:44:01 +00005992void LocationsBuilderMIPS::VisitSelect(HSelect* select) {
5993 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005994 CanMoveConditionally(select, codegen_->GetInstructionSetFeatures().IsR6(), locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00005995}
5996
5997void InstructionCodeGeneratorMIPS::VisitSelect(HSelect* select) {
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005998 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
5999 if (CanMoveConditionally(select, is_r6, /* locations_to_set */ nullptr)) {
6000 if (is_r6) {
6001 GenConditionalMoveR6(select);
6002 } else {
6003 GenConditionalMoveR2(select);
6004 }
6005 } else {
6006 LocationSummary* locations = select->GetLocations();
6007 MipsLabel false_target;
6008 GenerateTestAndBranch(select,
6009 /* condition_input_index */ 2,
6010 /* true_target */ nullptr,
6011 &false_target);
6012 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
6013 __ Bind(&false_target);
6014 }
David Brazdil74eb1b22015-12-14 11:44:01 +00006015}
6016
David Srbecky0cf44932015-12-09 14:09:59 +00006017void LocationsBuilderMIPS::VisitNativeDebugInfo(HNativeDebugInfo* info) {
6018 new (GetGraph()->GetArena()) LocationSummary(info);
6019}
6020
David Srbeckyd28f4a02016-03-14 17:14:24 +00006021void InstructionCodeGeneratorMIPS::VisitNativeDebugInfo(HNativeDebugInfo*) {
6022 // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
David Srbeckyc7098ff2016-02-09 14:30:11 +00006023}
6024
6025void CodeGeneratorMIPS::GenerateNop() {
6026 __ Nop();
David Srbecky0cf44932015-12-09 14:09:59 +00006027}
6028
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006029void LocationsBuilderMIPS::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
6030 Primitive::Type field_type = field_info.GetFieldType();
6031 bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
6032 bool generate_volatile = field_info.IsVolatile() && is_wide;
Alexey Frunze15958152017-02-09 19:08:30 -08006033 bool object_field_get_with_read_barrier =
6034 kEmitCompilerReadBarrier && (field_type == Primitive::kPrimNot);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006035 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Alexey Frunze15958152017-02-09 19:08:30 -08006036 instruction,
6037 generate_volatile
6038 ? LocationSummary::kCallOnMainOnly
6039 : (object_field_get_with_read_barrier
6040 ? LocationSummary::kCallOnSlowPath
6041 : LocationSummary::kNoCall));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006042
Alexey Frunzec61c0762017-04-10 13:54:23 -07006043 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
6044 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
6045 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006046 locations->SetInAt(0, Location::RequiresRegister());
6047 if (generate_volatile) {
6048 InvokeRuntimeCallingConvention calling_convention;
6049 // Need A0 to hold base + offset.
6050 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6051 if (field_type == Primitive::kPrimLong) {
6052 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimLong));
6053 } else {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006054 // Use Location::Any() to avoid running out of available FP registers.
6055 locations->SetOut(Location::Any());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006056 // Need some temp core regs since FP results are returned in core registers
6057 Location reg = calling_convention.GetReturnLocation(Primitive::kPrimLong);
6058 locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairLow<Register>()));
6059 locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairHigh<Register>()));
6060 }
6061 } else {
6062 if (Primitive::IsFloatingPointType(instruction->GetType())) {
6063 locations->SetOut(Location::RequiresFpuRegister());
6064 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006065 // The output overlaps in the case of an object field get with
6066 // read barriers enabled: we do not want the move to overwrite the
6067 // object's location, as we need it to emit the read barrier.
6068 locations->SetOut(Location::RequiresRegister(),
6069 object_field_get_with_read_barrier
6070 ? Location::kOutputOverlap
6071 : Location::kNoOutputOverlap);
6072 }
6073 if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
6074 // We need a temporary register for the read barrier marking slow
6075 // path in CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier.
6076 locations->AddTemp(Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006077 }
6078 }
6079}
6080
6081void InstructionCodeGeneratorMIPS::HandleFieldGet(HInstruction* instruction,
6082 const FieldInfo& field_info,
6083 uint32_t dex_pc) {
6084 Primitive::Type type = field_info.GetFieldType();
6085 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08006086 Location obj_loc = locations->InAt(0);
6087 Register obj = obj_loc.AsRegister<Register>();
6088 Location dst_loc = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006089 LoadOperandType load_type = kLoadUnsignedByte;
6090 bool is_volatile = field_info.IsVolatile();
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006091 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Tijana Jakovljevic57433862017-01-17 16:59:03 +01006092 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006093
6094 switch (type) {
6095 case Primitive::kPrimBoolean:
6096 load_type = kLoadUnsignedByte;
6097 break;
6098 case Primitive::kPrimByte:
6099 load_type = kLoadSignedByte;
6100 break;
6101 case Primitive::kPrimShort:
6102 load_type = kLoadSignedHalfword;
6103 break;
6104 case Primitive::kPrimChar:
6105 load_type = kLoadUnsignedHalfword;
6106 break;
6107 case Primitive::kPrimInt:
6108 case Primitive::kPrimFloat:
6109 case Primitive::kPrimNot:
6110 load_type = kLoadWord;
6111 break;
6112 case Primitive::kPrimLong:
6113 case Primitive::kPrimDouble:
6114 load_type = kLoadDoubleword;
6115 break;
6116 case Primitive::kPrimVoid:
6117 LOG(FATAL) << "Unreachable type " << type;
6118 UNREACHABLE();
6119 }
6120
6121 if (is_volatile && load_type == kLoadDoubleword) {
6122 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006123 __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006124 // Do implicit Null check
6125 __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
6126 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
Serban Constantinescufca16662016-07-14 09:21:59 +01006127 codegen_->InvokeRuntime(kQuickA64Load, instruction, dex_pc);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006128 CheckEntrypointTypes<kQuickA64Load, int64_t, volatile const int64_t*>();
6129 if (type == Primitive::kPrimDouble) {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006130 // FP results are returned in core registers. Need to move them.
Alexey Frunze15958152017-02-09 19:08:30 -08006131 if (dst_loc.IsFpuRegister()) {
6132 __ Mtc1(locations->GetTemp(1).AsRegister<Register>(), dst_loc.AsFpuRegister<FRegister>());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006133 __ MoveToFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
Alexey Frunze15958152017-02-09 19:08:30 -08006134 dst_loc.AsFpuRegister<FRegister>());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006135 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006136 DCHECK(dst_loc.IsDoubleStackSlot());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006137 __ StoreToOffset(kStoreWord,
6138 locations->GetTemp(1).AsRegister<Register>(),
6139 SP,
Alexey Frunze15958152017-02-09 19:08:30 -08006140 dst_loc.GetStackIndex());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006141 __ StoreToOffset(kStoreWord,
6142 locations->GetTemp(2).AsRegister<Register>(),
6143 SP,
Alexey Frunze15958152017-02-09 19:08:30 -08006144 dst_loc.GetStackIndex() + 4);
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006145 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006146 }
6147 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006148 if (type == Primitive::kPrimNot) {
6149 // /* HeapReference<Object> */ dst = *(obj + offset)
6150 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
6151 Location temp_loc = locations->GetTemp(0);
6152 // Note that a potential implicit null check is handled in this
6153 // CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier call.
6154 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6155 dst_loc,
6156 obj,
6157 offset,
6158 temp_loc,
6159 /* needs_null_check */ true);
6160 if (is_volatile) {
6161 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6162 }
6163 } else {
6164 __ LoadFromOffset(kLoadWord, dst_loc.AsRegister<Register>(), obj, offset, null_checker);
6165 if (is_volatile) {
6166 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6167 }
6168 // If read barriers are enabled, emit read barriers other than
6169 // Baker's using a slow path (and also unpoison the loaded
6170 // reference, if heap poisoning is enabled).
6171 codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
6172 }
6173 } else if (!Primitive::IsFloatingPointType(type)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006174 Register dst;
6175 if (type == Primitive::kPrimLong) {
Alexey Frunze15958152017-02-09 19:08:30 -08006176 DCHECK(dst_loc.IsRegisterPair());
6177 dst = dst_loc.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006178 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006179 DCHECK(dst_loc.IsRegister());
6180 dst = dst_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006181 }
Alexey Frunze2923db72016-08-20 01:55:47 -07006182 __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006183 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08006184 DCHECK(dst_loc.IsFpuRegister());
6185 FRegister dst = dst_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006186 if (type == Primitive::kPrimFloat) {
Alexey Frunze2923db72016-08-20 01:55:47 -07006187 __ LoadSFromOffset(dst, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006188 } else {
Alexey Frunze2923db72016-08-20 01:55:47 -07006189 __ LoadDFromOffset(dst, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006190 }
6191 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006192 }
6193
Alexey Frunze15958152017-02-09 19:08:30 -08006194 // Memory barriers, in the case of references, are handled in the
6195 // previous switch statement.
6196 if (is_volatile && (type != Primitive::kPrimNot)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006197 GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
6198 }
6199}
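// A note on the volatile 64-bit path above: MIPS32 has no cheap atomic 64-bit memory
// access, so volatile long/double field gets go through the kQuickA64Load runtime
// entrypoint, with A0 holding `obj + offset` and the result coming back in the runtime's
// long return register pair. For doubles that pair is then moved into the FPU register
// (mtc1 plus MoveToFpuHigh) or stored to the stack slot, which is why the output
// location is Location::Any() in that case.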
6200
6201void LocationsBuilderMIPS::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
6202 Primitive::Type field_type = field_info.GetFieldType();
6203 bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
6204 bool generate_volatile = field_info.IsVolatile() && is_wide;
6205 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
Serban Constantinescu54ff4822016-07-07 18:03:19 +01006206 instruction, generate_volatile ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006207
6208 locations->SetInAt(0, Location::RequiresRegister());
6209 if (generate_volatile) {
6210 InvokeRuntimeCallingConvention calling_convention;
6211 // Need A0 to hold base + offset.
6212 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
6213 if (field_type == Primitive::kPrimLong) {
6214 locations->SetInAt(1, Location::RegisterPairLocation(
6215 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
6216 } else {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006217 // Use Location::Any() to avoid running out of available FP registers.
6218 locations->SetInAt(1, Location::Any());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006219 // Pass FP parameters in core registers.
6220 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
6221 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
6222 }
6223 } else {
6224 if (Primitive::IsFloatingPointType(field_type)) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006225 locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006226 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006227 locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006228 }
6229 }
6230}
6231
6232void InstructionCodeGeneratorMIPS::HandleFieldSet(HInstruction* instruction,
6233 const FieldInfo& field_info,
Goran Jakovljevice114da22016-12-26 14:21:43 +01006234 uint32_t dex_pc,
6235 bool value_can_be_null) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006236 Primitive::Type type = field_info.GetFieldType();
6237 LocationSummary* locations = instruction->GetLocations();
6238 Register obj = locations->InAt(0).AsRegister<Register>();
Alexey Frunzef58b2482016-09-02 22:14:06 -07006239 Location value_location = locations->InAt(1);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006240 StoreOperandType store_type = kStoreByte;
6241 bool is_volatile = field_info.IsVolatile();
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006242 uint32_t offset = field_info.GetFieldOffset().Uint32Value();
Alexey Frunzec061de12017-02-14 13:27:23 -08006243 bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
Tijana Jakovljevic57433862017-01-17 16:59:03 +01006244 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006245
6246 switch (type) {
6247 case Primitive::kPrimBoolean:
6248 case Primitive::kPrimByte:
6249 store_type = kStoreByte;
6250 break;
6251 case Primitive::kPrimShort:
6252 case Primitive::kPrimChar:
6253 store_type = kStoreHalfword;
6254 break;
6255 case Primitive::kPrimInt:
6256 case Primitive::kPrimFloat:
6257 case Primitive::kPrimNot:
6258 store_type = kStoreWord;
6259 break;
6260 case Primitive::kPrimLong:
6261 case Primitive::kPrimDouble:
6262 store_type = kStoreDoubleword;
6263 break;
6264 case Primitive::kPrimVoid:
6265 LOG(FATAL) << "Unreachable type " << type;
6266 UNREACHABLE();
6267 }
6268
6269 if (is_volatile) {
6270 GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
6271 }
6272
6273 if (is_volatile && store_type == kStoreDoubleword) {
6274 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevic73a42652015-11-20 17:22:57 +01006275 __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006276 // Do implicit Null check.
6277 __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
6278 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
6279 if (type == Primitive::kPrimDouble) {
6280 // Pass FP parameters in core registers.
Alexey Frunzef58b2482016-09-02 22:14:06 -07006281 if (value_location.IsFpuRegister()) {
6282 __ Mfc1(locations->GetTemp(1).AsRegister<Register>(),
6283 value_location.AsFpuRegister<FRegister>());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006284 __ MoveFromFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
Alexey Frunzef58b2482016-09-02 22:14:06 -07006285 value_location.AsFpuRegister<FRegister>());
6286 } else if (value_location.IsDoubleStackSlot()) {
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006287 __ LoadFromOffset(kLoadWord,
6288 locations->GetTemp(1).AsRegister<Register>(),
6289 SP,
Alexey Frunzef58b2482016-09-02 22:14:06 -07006290 value_location.GetStackIndex());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006291 __ LoadFromOffset(kLoadWord,
6292 locations->GetTemp(2).AsRegister<Register>(),
6293 SP,
Alexey Frunzef58b2482016-09-02 22:14:06 -07006294 value_location.GetStackIndex() + 4);
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006295 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006296 DCHECK(value_location.IsConstant());
6297 DCHECK(value_location.GetConstant()->IsDoubleConstant());
6298 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02006299 __ LoadConst64(locations->GetTemp(2).AsRegister<Register>(),
6300 locations->GetTemp(1).AsRegister<Register>(),
6301 value);
6302 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006303 }
Serban Constantinescufca16662016-07-14 09:21:59 +01006304 codegen_->InvokeRuntime(kQuickA64Store, instruction, dex_pc);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006305 CheckEntrypointTypes<kQuickA64Store, void, volatile int64_t *, int64_t>();
6306 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006307 if (value_location.IsConstant()) {
6308 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
6309 __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
6310 } else if (!Primitive::IsFloatingPointType(type)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006311 Register src;
6312 if (type == Primitive::kPrimLong) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006313 src = value_location.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006314 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006315 src = value_location.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006316 }
Alexey Frunzec061de12017-02-14 13:27:23 -08006317 if (kPoisonHeapReferences && needs_write_barrier) {
6318 // Note that in the case where `value` is a null reference,
6319 // we do not enter this block, as a null reference does not
6320 // need poisoning.
6321 DCHECK_EQ(type, Primitive::kPrimNot);
6322 __ PoisonHeapReference(TMP, src);
6323 __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
6324 } else {
6325 __ StoreToOffset(store_type, src, obj, offset, null_checker);
6326 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006327 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006328 FRegister src = value_location.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006329 if (type == Primitive::kPrimFloat) {
Alexey Frunze2923db72016-08-20 01:55:47 -07006330 __ StoreSToOffset(src, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006331 } else {
Alexey Frunze2923db72016-08-20 01:55:47 -07006332 __ StoreDToOffset(src, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006333 }
6334 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006335 }
6336
Alexey Frunzec061de12017-02-14 13:27:23 -08006337 if (needs_write_barrier) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07006338 Register src = value_location.AsRegister<Register>();
Goran Jakovljevice114da22016-12-26 14:21:43 +01006339 codegen_->MarkGCCard(obj, src, value_can_be_null);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006340 }
6341
6342 if (is_volatile) {
6343 GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
6344 }
6345}
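// The store side mirrors the load side: volatile 64-bit field puts go through
// kQuickA64Store with A0 holding `obj + offset` and the value passed in core registers
// (doubles are moved out of the FPU via mfc1/MoveFromFpuHigh first). Reference stores
// additionally poison the value when heap poisoning is enabled and mark the GC card via
// MarkGCCard whenever a write barrier is needed.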
6346
6347void LocationsBuilderMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
6348 HandleFieldGet(instruction, instruction->GetFieldInfo());
6349}
6350
6351void InstructionCodeGeneratorMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
6352 HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
6353}
6354
6355void LocationsBuilderMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
6356 HandleFieldSet(instruction, instruction->GetFieldInfo());
6357}
6358
6359void InstructionCodeGeneratorMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
Goran Jakovljevice114da22016-12-26 14:21:43 +01006360 HandleFieldSet(instruction,
6361 instruction->GetFieldInfo(),
6362 instruction->GetDexPc(),
6363 instruction->GetValueCanBeNull());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006364}
6365
Alexey Frunze15958152017-02-09 19:08:30 -08006366void InstructionCodeGeneratorMIPS::GenerateReferenceLoadOneRegister(
6367 HInstruction* instruction,
6368 Location out,
6369 uint32_t offset,
6370 Location maybe_temp,
6371 ReadBarrierOption read_barrier_option) {
6372 Register out_reg = out.AsRegister<Register>();
6373 if (read_barrier_option == kWithReadBarrier) {
6374 CHECK(kEmitCompilerReadBarrier);
6375 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
6376 if (kUseBakerReadBarrier) {
6377 // Load with fast path based Baker's read barrier.
6378 // /* HeapReference<Object> */ out = *(out + offset)
6379 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6380 out,
6381 out_reg,
6382 offset,
6383 maybe_temp,
6384 /* needs_null_check */ false);
6385 } else {
6386 // Load with slow path based read barrier.
6387 // Save the value of `out` into `maybe_temp` before overwriting it
6388 // in the following move operation, as we will need it for the
6389 // read barrier below.
6390 __ Move(maybe_temp.AsRegister<Register>(), out_reg);
6391 // /* HeapReference<Object> */ out = *(out + offset)
6392 __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
6393 codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
6394 }
6395 } else {
6396 // Plain load with no read barrier.
6397 // /* HeapReference<Object> */ out = *(out + offset)
6398 __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
6399 __ MaybeUnpoisonHeapReference(out_reg);
6400 }
6401}
6402
6403void InstructionCodeGeneratorMIPS::GenerateReferenceLoadTwoRegisters(
6404 HInstruction* instruction,
6405 Location out,
6406 Location obj,
6407 uint32_t offset,
6408 Location maybe_temp,
6409 ReadBarrierOption read_barrier_option) {
6410 Register out_reg = out.AsRegister<Register>();
6411 Register obj_reg = obj.AsRegister<Register>();
6412 if (read_barrier_option == kWithReadBarrier) {
6413 CHECK(kEmitCompilerReadBarrier);
6414 if (kUseBakerReadBarrier) {
6415 DCHECK(maybe_temp.IsRegister()) << maybe_temp;
6416 // Load with fast path based Baker's read barrier.
6417 // /* HeapReference<Object> */ out = *(obj + offset)
6418 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
6419 out,
6420 obj_reg,
6421 offset,
6422 maybe_temp,
6423 /* needs_null_check */ false);
6424 } else {
6425 // Load with slow path based read barrier.
6426 // /* HeapReference<Object> */ out = *(obj + offset)
6427 __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
6428 codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
6429 }
6430 } else {
6431 // Plain load with no read barrier.
6432 // /* HeapReference<Object> */ out = *(obj + offset)
6433 __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
6434 __ MaybeUnpoisonHeapReference(out_reg);
6435 }
6436}
6437
6438void InstructionCodeGeneratorMIPS::GenerateGcRootFieldLoad(HInstruction* instruction,
6439 Location root,
6440 Register obj,
6441 uint32_t offset,
6442 ReadBarrierOption read_barrier_option) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07006443 Register root_reg = root.AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08006444 if (read_barrier_option == kWithReadBarrier) {
6445 DCHECK(kEmitCompilerReadBarrier);
6446 if (kUseBakerReadBarrier) {
6447 // Fast path implementation of art::ReadBarrier::BarrierForRoot when
6448 // Baker's read barriers are used:
6449 //
6450 // root = obj.field;
6451 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6452 // if (temp != null) {
6453 // root = temp(root)
6454 // }
6455
6456 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6457 __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
6458 static_assert(
6459 sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
6460 "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
6461 "have different sizes.");
6462 static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
6463 "art::mirror::CompressedReference<mirror::Object> and int32_t "
6464 "have different sizes.");
6465
6466 // Slow path marking the GC root `root`.
6467 Location temp = Location::RegisterLocation(T9);
6468 SlowPathCodeMIPS* slow_path =
6469 new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(
6470 instruction,
6471 root,
6472 /*entrypoint*/ temp);
6473 codegen_->AddSlowPath(slow_path);
6474
6475 // temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
6476 const int32_t entry_point_offset =
6477 CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(root.reg() - 1);
6478 // Loading the entrypoint does not require a load acquire since it is only changed when
6479 // threads are suspended or running a checkpoint.
6480 __ LoadFromOffset(kLoadWord, temp.AsRegister<Register>(), TR, entry_point_offset);
6481 // The entrypoint is null when the GC is not marking; this saves one load compared to
6482 // checking GetIsGcMarking.
6483 __ Bnez(temp.AsRegister<Register>(), slow_path->GetEntryLabel());
6484 __ Bind(slow_path->GetExitLabel());
6485 } else {
6486 // GC root loaded through a slow path for read barriers other
6487 // than Baker's.
6488 // /* GcRoot<mirror::Object>* */ root = obj + offset
6489 __ Addiu32(root_reg, obj, offset);
6490 // /* mirror::Object* */ root = root->Read()
6491 codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
6492 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07006493 } else {
6494 // Plain GC root load with no read barrier.
6495 // /* GcRoot<mirror::Object> */ root = *(obj + offset)
6496 __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
6497 // Note that GC roots are not affected by heap poisoning, thus we
6498 // do not have to unpoison `root_reg` here.
6499 }
6500}
6501
Alexey Frunze15958152017-02-09 19:08:30 -08006502void CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
6503 Location ref,
6504 Register obj,
6505 uint32_t offset,
6506 Location temp,
6507 bool needs_null_check) {
6508 DCHECK(kEmitCompilerReadBarrier);
6509 DCHECK(kUseBakerReadBarrier);
6510
6511 // /* HeapReference<Object> */ ref = *(obj + offset)
6512 Location no_index = Location::NoLocation();
6513 ScaleFactor no_scale_factor = TIMES_1;
6514 GenerateReferenceLoadWithBakerReadBarrier(instruction,
6515 ref,
6516 obj,
6517 offset,
6518 no_index,
6519 no_scale_factor,
6520 temp,
6521 needs_null_check);
6522}
6523
6524void CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
6525 Location ref,
6526 Register obj,
6527 uint32_t data_offset,
6528 Location index,
6529 Location temp,
6530 bool needs_null_check) {
6531 DCHECK(kEmitCompilerReadBarrier);
6532 DCHECK(kUseBakerReadBarrier);
6533
6534 static_assert(
6535 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
6536 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
6537 // /* HeapReference<Object> */ ref =
6538 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
6539 ScaleFactor scale_factor = TIMES_4;
6540 GenerateReferenceLoadWithBakerReadBarrier(instruction,
6541 ref,
6542 obj,
6543 data_offset,
6544 index,
6545 scale_factor,
6546 temp,
6547 needs_null_check);
6548}
6549
6550void CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
6551 Location ref,
6552 Register obj,
6553 uint32_t offset,
6554 Location index,
6555 ScaleFactor scale_factor,
6556 Location temp,
6557 bool needs_null_check,
6558 bool always_update_field) {
6559 DCHECK(kEmitCompilerReadBarrier);
6560 DCHECK(kUseBakerReadBarrier);
6561
6562 // In slow path based read barriers, the read barrier call is
6563 // inserted after the original load. However, in fast path based
6564 // Baker's read barriers, we need to perform the load of
6565 // mirror::Object::monitor_ *before* the original reference load.
6566 // This load-load ordering is required by the read barrier.
6567 // The fast path/slow path (for Baker's algorithm) should look like:
6568 //
6569 // uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
6570 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
6571 // HeapReference<Object> ref = *src; // Original reference load.
6572 // bool is_gray = (rb_state == ReadBarrier::GrayState());
6573 // if (is_gray) {
6574 // ref = ReadBarrier::Mark(ref); // Performed by runtime entrypoint slow path.
6575 // }
6576 //
6577 // Note: the original implementation in ReadBarrier::Barrier is
6578 // slightly more complex as it performs additional checks that we do
6579 // not do here for performance reasons.
6580
6581 Register ref_reg = ref.AsRegister<Register>();
6582 Register temp_reg = temp.AsRegister<Register>();
6583 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
6584
6585 // /* int32_t */ monitor = obj->monitor_
6586 __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
6587 if (needs_null_check) {
6588 MaybeRecordImplicitNullCheck(instruction);
6589 }
6590 // /* LockWord */ lock_word = LockWord(monitor)
6591 static_assert(sizeof(LockWord) == sizeof(int32_t),
6592 "art::LockWord and int32_t have different sizes.");
6593
6594 __ Sync(0); // Barrier to prevent load-load reordering.
6595
6596 // The actual reference load.
6597 if (index.IsValid()) {
6598 // Load types involving an "index": ArrayGet,
6599 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
6600 // intrinsics.
6601 // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
6602 if (index.IsConstant()) {
6603 size_t computed_offset =
6604 (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
6605 __ LoadFromOffset(kLoadWord, ref_reg, obj, computed_offset);
6606 } else {
6607 // Handle the special case of the
6608 // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
6609 // intrinsics, which use a register pair as index ("long
6610 // offset"), of which only the low part contains data.
6611 Register index_reg = index.IsRegisterPair()
6612 ? index.AsRegisterPairLow<Register>()
6613 : index.AsRegister<Register>();
Chris Larsencd0295d2017-03-31 15:26:54 -07006614 __ ShiftAndAdd(TMP, index_reg, obj, scale_factor, TMP);
Alexey Frunze15958152017-02-09 19:08:30 -08006615 __ LoadFromOffset(kLoadWord, ref_reg, TMP, offset);
6616 }
6617 } else {
6618 // /* HeapReference<Object> */ ref = *(obj + offset)
6619 __ LoadFromOffset(kLoadWord, ref_reg, obj, offset);
6620 }
6621
6622 // Object* ref = ref_addr->AsMirrorPtr()
6623 __ MaybeUnpoisonHeapReference(ref_reg);
6624
6625 // Slow path marking the object `ref` when it is gray.
6626 SlowPathCodeMIPS* slow_path;
6627 if (always_update_field) {
6628 // ReadBarrierMarkAndUpdateFieldSlowPathMIPS only supports address
6629 // of the form `obj + field_offset`, where `obj` is a register and
6630 // `field_offset` is a register pair (of which only the lower half
6631 // is used). Thus `offset` and `scale_factor` above are expected
6632 // to be null in this code path.
6633 DCHECK_EQ(offset, 0u);
6634 DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
6635 slow_path = new (GetGraph()->GetArena())
6636 ReadBarrierMarkAndUpdateFieldSlowPathMIPS(instruction,
6637 ref,
6638 obj,
6639 /* field_offset */ index,
6640 temp_reg);
6641 } else {
6642 slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(instruction, ref);
6643 }
6644 AddSlowPath(slow_path);
6645
6646 // if (rb_state == ReadBarrier::GrayState())
6647 // ref = ReadBarrier::Mark(ref);
6648 // Given the numeric representation, it's enough to check the low bit of the
6649 // rb_state. We do that by shifting the bit into the sign bit (31) and
6650 // performing a branch on less than zero.
6651 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
6652 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
6653 static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
6654 __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
6655 __ Bltz(temp_reg, slow_path->GetEntryLabel());
6656 __ Bind(slow_path->GetExitLabel());
6657}
6658
6659void CodeGeneratorMIPS::GenerateReadBarrierSlow(HInstruction* instruction,
6660 Location out,
6661 Location ref,
6662 Location obj,
6663 uint32_t offset,
6664 Location index) {
6665 DCHECK(kEmitCompilerReadBarrier);
6666
6667 // Insert a slow path based read barrier *after* the reference load.
6668 //
6669 // If heap poisoning is enabled, the unpoisoning of the loaded
6670 // reference will be carried out by the runtime within the slow
6671 // path.
6672 //
6673 // Note that `ref` currently does not get unpoisoned (when heap
6674 // poisoning is enabled), which is alright as the `ref` argument is
6675 // not used by the artReadBarrierSlow entry point.
6676 //
6677 // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
6678 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena())
6679 ReadBarrierForHeapReferenceSlowPathMIPS(instruction, out, ref, obj, offset, index);
6680 AddSlowPath(slow_path);
6681
6682 __ B(slow_path->GetEntryLabel());
6683 __ Bind(slow_path->GetExitLabel());
6684}
6685
6686void CodeGeneratorMIPS::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
6687 Location out,
6688 Location ref,
6689 Location obj,
6690 uint32_t offset,
6691 Location index) {
6692 if (kEmitCompilerReadBarrier) {
6693 // Baker's read barriers shall be handled by the fast path
6694 // (CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier).
6695 DCHECK(!kUseBakerReadBarrier);
6696 // If heap poisoning is enabled, unpoisoning will be taken care of
6697 // by the runtime within the slow path.
6698 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
6699 } else if (kPoisonHeapReferences) {
6700 __ UnpoisonHeapReference(out.AsRegister<Register>());
6701 }
6702}
6703
6704void CodeGeneratorMIPS::GenerateReadBarrierForRootSlow(HInstruction* instruction,
6705 Location out,
6706 Location root) {
6707 DCHECK(kEmitCompilerReadBarrier);
6708
6709 // Insert a slow path based read barrier *after* the GC root load.
6710 //
6711 // Note that GC roots are not affected by heap poisoning, so we do
6712 // not need to do anything special for this here.
6713 SlowPathCodeMIPS* slow_path =
6714 new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS(instruction, out, root);
6715 AddSlowPath(slow_path);
6716
6717 __ B(slow_path->GetEntryLabel());
6718 __ Bind(slow_path->GetExitLabel());
6719}
6720
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006721void LocationsBuilderMIPS::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006722 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
6723 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07006724 bool baker_read_barrier_slow_path = false;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006725 switch (type_check_kind) {
6726 case TypeCheckKind::kExactCheck:
6727 case TypeCheckKind::kAbstractClassCheck:
6728 case TypeCheckKind::kClassHierarchyCheck:
6729 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08006730 call_kind =
6731 kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
Alexey Frunzec61c0762017-04-10 13:54:23 -07006732 baker_read_barrier_slow_path = kUseBakerReadBarrier;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006733 break;
6734 case TypeCheckKind::kArrayCheck:
6735 case TypeCheckKind::kUnresolvedCheck:
6736 case TypeCheckKind::kInterfaceCheck:
6737 call_kind = LocationSummary::kCallOnSlowPath;
6738 break;
6739 }
6740
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006741 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07006742 if (baker_read_barrier_slow_path) {
6743 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
6744 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006745 locations->SetInAt(0, Location::RequiresRegister());
6746 locations->SetInAt(1, Location::RequiresRegister());
6747 // The output does overlap inputs.
6748 // Note that TypeCheckSlowPathMIPS uses this register too.
6749 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Alexey Frunze15958152017-02-09 19:08:30 -08006750 locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006751}
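// A summary of the location choices above: the first four check kinds are generated
// inline, so they only need a slow path when read barriers are emitted, and with Baker
// read barriers no caller-save registers have to be preserved for that path;
// kArrayCheck, kUnresolvedCheck and kInterfaceCheck always fall back to
// TypeCheckSlowPathMIPS.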
6752
6753void InstructionCodeGeneratorMIPS::VisitInstanceOf(HInstanceOf* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006754 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006755 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08006756 Location obj_loc = locations->InAt(0);
6757 Register obj = obj_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006758 Register cls = locations->InAt(1).AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08006759 Location out_loc = locations->Out();
6760 Register out = out_loc.AsRegister<Register>();
6761 const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
6762 DCHECK_LE(num_temps, 1u);
6763 Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006764 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
6765 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
6766 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
6767 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006768 MipsLabel done;
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006769 SlowPathCodeMIPS* slow_path = nullptr;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006770
6771 // Return 0 if `obj` is null.
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006772 // Avoid this check if we know `obj` is not null.
6773 if (instruction->MustDoNullCheck()) {
6774 __ Move(out, ZERO);
6775 __ Beqz(obj, &done);
6776 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006777
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006778 switch (type_check_kind) {
6779 case TypeCheckKind::kExactCheck: {
6780 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006781 GenerateReferenceLoadTwoRegisters(instruction,
6782 out_loc,
6783 obj_loc,
6784 class_offset,
6785 maybe_temp_loc,
6786 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006787 // Classes must be equal for the instanceof to succeed.
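      // Equality idiom: XOR leaves zero in `out` only when the two class
      // pointers match, and SLTIU with immediate 1 computes (out < 1), i.e.
      // it rewrites `out` to 1 on equality and to 0 otherwise.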
6788 __ Xor(out, out, cls);
6789 __ Sltiu(out, out, 1);
6790 break;
6791 }
6792
6793 case TypeCheckKind::kAbstractClassCheck: {
6794 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006795 GenerateReferenceLoadTwoRegisters(instruction,
6796 out_loc,
6797 obj_loc,
6798 class_offset,
6799 maybe_temp_loc,
6800 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006801 // If the class is abstract, we eagerly fetch the super class of the
6802 // object to avoid doing a comparison we know will fail.
6803 MipsLabel loop;
6804 __ Bind(&loop);
6805 // /* HeapReference<Class> */ out = out->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08006806 GenerateReferenceLoadOneRegister(instruction,
6807 out_loc,
6808 super_offset,
6809 maybe_temp_loc,
6810 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006811 // If `out` is null, we use it for the result, and jump to `done`.
6812 __ Beqz(out, &done);
6813 __ Bne(out, cls, &loop);
6814 __ LoadConst32(out, 1);
6815 break;
6816 }
6817
6818 case TypeCheckKind::kClassHierarchyCheck: {
6819 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006820 GenerateReferenceLoadTwoRegisters(instruction,
6821 out_loc,
6822 obj_loc,
6823 class_offset,
6824 maybe_temp_loc,
6825 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006826 // Walk over the class hierarchy to find a match.
6827 MipsLabel loop, success;
6828 __ Bind(&loop);
6829 __ Beq(out, cls, &success);
6830 // /* HeapReference<Class> */ out = out->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08006831 GenerateReferenceLoadOneRegister(instruction,
6832 out_loc,
6833 super_offset,
6834 maybe_temp_loc,
6835 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006836 __ Bnez(out, &loop);
6837 // If `out` is null, we use it for the result, and jump to `done`.
6838 __ B(&done);
6839 __ Bind(&success);
6840 __ LoadConst32(out, 1);
6841 break;
6842 }
6843
6844 case TypeCheckKind::kArrayObjectCheck: {
6845 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006846 GenerateReferenceLoadTwoRegisters(instruction,
6847 out_loc,
6848 obj_loc,
6849 class_offset,
6850 maybe_temp_loc,
6851 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006852 // Do an exact check.
6853 MipsLabel success;
6854 __ Beq(out, cls, &success);
6855 // Otherwise, we need to check that the object's class is a non-primitive array.
6856 // /* HeapReference<Class> */ out = out->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08006857 GenerateReferenceLoadOneRegister(instruction,
6858 out_loc,
6859 component_offset,
6860 maybe_temp_loc,
6861 kCompilerReadBarrierOption);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006862 // If `out` is null, we use it for the result, and jump to `done`.
6863 __ Beqz(out, &done);
6864 __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
6865 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
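      // The component class's primitive_type_ field is kPrimNot (0) for
      // reference types, so the SLTIU below leaves 1 in `out` only when the
      // array holds references, i.e. it is a non-primitive array.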
6866 __ Sltiu(out, out, 1);
6867 __ B(&done);
6868 __ Bind(&success);
6869 __ LoadConst32(out, 1);
6870 break;
6871 }
6872
6873 case TypeCheckKind::kArrayCheck: {
6874 // No read barrier since the slow path will retry upon failure.
6875 // /* HeapReference<Class> */ out = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08006876 GenerateReferenceLoadTwoRegisters(instruction,
6877 out_loc,
6878 obj_loc,
6879 class_offset,
6880 maybe_temp_loc,
6881 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006882 DCHECK(locations->OnlyCallsOnSlowPath());
6883 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
6884 /* is_fatal */ false);
6885 codegen_->AddSlowPath(slow_path);
6886 __ Bne(out, cls, slow_path->GetEntryLabel());
6887 __ LoadConst32(out, 1);
6888 break;
6889 }
6890
6891 case TypeCheckKind::kUnresolvedCheck:
6892 case TypeCheckKind::kInterfaceCheck: {
6893 // Note that we indeed only call on slow path, but we always go
6894 // into the slow path for the unresolved and interface check
6895 // cases.
6896 //
6897 // We cannot directly call the InstanceofNonTrivial runtime
6898 // entry point without resorting to a type checking slow path
6899 // here (i.e. by calling InvokeRuntime directly), as it would
6900      // require assigning fixed registers for the inputs of this
6901 // HInstanceOf instruction (following the runtime calling
6902 // convention), which might be cluttered by the potential first
6903 // read barrier emission at the beginning of this method.
6904 //
6905 // TODO: Introduce a new runtime entry point taking the object
6906 // to test (instead of its class) as argument, and let it deal
6907 // with the read barrier issues. This will let us refactor this
6908 // case of the `switch` code as it was previously (with a direct
6909 // call to the runtime not using a type checking slow path).
6910 // This should also be beneficial for the other cases above.
6911 DCHECK(locations->OnlyCallsOnSlowPath());
6912 slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
6913 /* is_fatal */ false);
6914 codegen_->AddSlowPath(slow_path);
6915 __ B(slow_path->GetEntryLabel());
6916 break;
6917 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006918 }
6919
6920 __ Bind(&done);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08006921
6922 if (slow_path != nullptr) {
6923 __ Bind(slow_path->GetExitLabel());
6924 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006925}
6926
6927void LocationsBuilderMIPS::VisitIntConstant(HIntConstant* constant) {
6928 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
6929 locations->SetOut(Location::ConstantLocation(constant));
6930}
6931
6932void InstructionCodeGeneratorMIPS::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
6933 // Will be generated at use site.
6934}
6935
6936void LocationsBuilderMIPS::VisitNullConstant(HNullConstant* constant) {
6937 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
6938 locations->SetOut(Location::ConstantLocation(constant));
6939}
6940
6941void InstructionCodeGeneratorMIPS::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
6942 // Will be generated at use site.
6943}
6944
6945void LocationsBuilderMIPS::HandleInvoke(HInvoke* invoke) {
6946 InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
6947 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
6948}
6949
6950void LocationsBuilderMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
6951 HandleInvoke(invoke);
Alexey Frunze1b8464d2016-11-12 17:22:05 -08006952 // The register T7 is required to be used for the hidden argument in
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006953 // art_quick_imt_conflict_trampoline, so add the hidden argument.
Alexey Frunze1b8464d2016-11-12 17:22:05 -08006954 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T7));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006955}
6956
6957void InstructionCodeGeneratorMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
6958 // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
6959 Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006960 Location receiver = invoke->GetLocations()->InAt(0);
6961 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Andreas Gampe542451c2016-07-26 09:02:02 -07006962 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006963
6964 // Set the hidden argument.
6965 __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
6966 invoke->GetDexMethodIndex());
6967
6968 // temp = object->GetClass();
6969 if (receiver.IsStackSlot()) {
6970 __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
6971 __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
6972 } else {
6973 __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
6974 }
6975 codegen_->MaybeRecordImplicitNullCheck(invoke);
Alexey Frunzec061de12017-02-14 13:27:23 -08006976 // Instead of simply (possibly) unpoisoning `temp` here, we should
6977 // emit a read barrier for the previous class reference load.
6978  // However, this is not required in practice, as this is an
6979 // intermediate/temporary reference and because the current
6980 // concurrent copying collector keeps the from-space memory
6981 // intact/accessible until the end of the marking phase (the
6982  // concurrent copying collector may not do so in the future).
6983 __ MaybeUnpoisonHeapReference(temp);
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00006984 __ LoadFromOffset(kLoadWord, temp, temp,
6985 mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
6986 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00006987 invoke->GetImtIndex(), kMipsPointerSize));
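  // Note: the IMT slot holds either the resolved ArtMethod or the conflict
  // trampoline; in the conflict case the trampoline relies on the hidden
  // dex method index placed in T7 above to locate the actual target.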
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006988 // temp = temp->GetImtEntryAt(method_offset);
6989 __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
6990 // T9 = temp->GetEntryPoint();
6991 __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
6992 // T9();
6993 __ Jalr(T9);
Alexey Frunze57eb0f52016-07-29 22:04:46 -07006994 __ NopIfNoReordering();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02006995 DCHECK(!codegen_->IsLeafMethod());
6996 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
6997}
6998
6999void LocationsBuilderMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen701566a2015-10-27 15:29:13 -07007000 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
7001 if (intrinsic.TryDispatch(invoke)) {
7002 return;
7003 }
7004
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007005 HandleInvoke(invoke);
7006}
7007
7008void LocationsBuilderMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00007009 // Explicit clinit checks triggered by static invokes must have been pruned by
7010 // art::PrepareForRegisterAllocation.
7011 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007012
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007013 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
Vladimir Marko65979462017-05-19 17:25:12 +01007014 bool has_extra_input = invoke->HasPcRelativeMethodLoadKind() && !is_r6;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007015
Chris Larsen701566a2015-10-27 15:29:13 -07007016 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
7017 if (intrinsic.TryDispatch(invoke)) {
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007018 if (invoke->GetLocations()->CanCall() && has_extra_input) {
7019 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
7020 }
Chris Larsen701566a2015-10-27 15:29:13 -07007021 return;
7022 }
7023
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007024 HandleInvoke(invoke);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007025
7026 // Add the extra input register if either the dex cache array base register
7027 // or the PC-relative base register for accessing literals is needed.
7028 if (has_extra_input) {
7029 invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
7030 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007031}
7032
Orion Hodsonac141392017-01-13 11:53:47 +00007033void LocationsBuilderMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
7034 HandleInvoke(invoke);
7035}
7036
7037void InstructionCodeGeneratorMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
7038 codegen_->GenerateInvokePolymorphicCall(invoke);
7039}
7040
Chris Larsen701566a2015-10-27 15:29:13 -07007041static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007042 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen701566a2015-10-27 15:29:13 -07007043 IntrinsicCodeGeneratorMIPS intrinsic(codegen);
7044 intrinsic.Dispatch(invoke);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007045 return true;
7046 }
7047 return false;
7048}
7049
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007050HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind(
Alexey Frunze06a46c42016-07-19 15:00:40 -07007051 HLoadString::LoadKind desired_string_load_kind) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007052 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunze06a46c42016-07-19 15:00:40 -07007053 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007054 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
Vladimir Markoaad75c62016-10-03 08:46:48 +00007055 // with irreducible loops.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007056 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007057 bool is_r6 = GetInstructionSetFeatures().IsR6();
7058 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007059 switch (desired_string_load_kind) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007060 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoaad75c62016-10-03 08:46:48 +00007061 case HLoadString::LoadKind::kBssEntry:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007062 DCHECK(!Runtime::Current()->UseJitCompilation());
Alexey Frunze06a46c42016-07-19 15:00:40 -07007063 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007064 case HLoadString::LoadKind::kBootImageAddress:
7065 break;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007066 case HLoadString::LoadKind::kJitTableAddress:
7067 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunze627c1a02017-01-30 19:28:14 -08007068 fallback_load = false;
Nicolas Geoffray132d8362016-11-16 09:19:42 +00007069 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007070 case HLoadString::LoadKind::kRuntimeCall:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007071 fallback_load = false;
7072 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007073 }
7074 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007075 desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007076 }
7077 return desired_string_load_kind;
Vladimir Markocac5a7e2016-02-22 10:39:50 +00007078}
7079
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007080HLoadClass::LoadKind CodeGeneratorMIPS::GetSupportedLoadClassKind(
7081 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007082 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunze06a46c42016-07-19 15:00:40 -07007083 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007084 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
7085 // with irreducible loops.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007086 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007087 bool is_r6 = GetInstructionSetFeatures().IsR6();
7088 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007089 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007090 case HLoadClass::LoadKind::kInvalid:
7091 LOG(FATAL) << "UNREACHABLE";
7092 UNREACHABLE();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007093 case HLoadClass::LoadKind::kReferrersClass:
7094 fallback_load = false;
7095 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007096 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007097 case HLoadClass::LoadKind::kBssEntry:
7098 DCHECK(!Runtime::Current()->UseJitCompilation());
7099 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007100 case HLoadClass::LoadKind::kBootImageAddress:
7101 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007102 case HLoadClass::LoadKind::kJitTableAddress:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007103 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunze627c1a02017-01-30 19:28:14 -08007104 fallback_load = false;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007105 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007106 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007107 fallback_load = false;
7108 break;
7109 }
7110 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007111 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007112 }
7113 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007114}
7115
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007116Register CodeGeneratorMIPS::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
7117 Register temp) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007118 CHECK(!GetInstructionSetFeatures().IsR6());
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007119 CHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
7120 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
7121 if (!invoke->GetLocations()->Intrinsified()) {
7122 return location.AsRegister<Register>();
7123 }
7124 // For intrinsics we allow any location, so it may be on the stack.
7125 if (!location.IsRegister()) {
7126 __ LoadFromOffset(kLoadWord, temp, SP, location.GetStackIndex());
7127 return temp;
7128 }
7129 // For register locations, check if the register was saved. If so, get it from the stack.
7130 // Note: There is a chance that the register was saved but not overwritten, so we could
7131 // save one load. However, since this is just an intrinsic slow path we prefer this
7132  // simple and more robust approach rather than trying to determine if that's the case.
7133 SlowPathCode* slow_path = GetCurrentSlowPath();
7134 DCHECK(slow_path != nullptr); // For intrinsified invokes the call is emitted on the slow path.
7135 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
7136 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
7137 __ LoadFromOffset(kLoadWord, temp, SP, stack_offset);
7138 return temp;
7139 }
7140 return location.AsRegister<Register>();
7141}
7142
Vladimir Markodc151b22015-10-15 18:02:30 +01007143HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS::GetSupportedInvokeStaticOrDirectDispatch(
7144 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01007145 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007146 HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007147 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007148 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007149 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
7150 // with irreducible loops.
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007151 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007152 bool is_r6 = GetInstructionSetFeatures().IsR6();
7153 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007154 switch (dispatch_info.method_load_kind) {
Vladimir Marko65979462017-05-19 17:25:12 +01007155 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007156 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007157 break;
Vladimir Markodc151b22015-10-15 18:02:30 +01007158 default:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007159 fallback_load = false;
Vladimir Markodc151b22015-10-15 18:02:30 +01007160 break;
7161 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007162 if (fallback_load) {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007163 dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007164 dispatch_info.method_load_data = 0;
7165 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007166 return dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01007167}
7168
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007169void CodeGeneratorMIPS::GenerateStaticOrDirectCall(
7170 HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007171 // All registers are assumed to be correctly set up per the calling convention.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007172 Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007173 HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
7174 HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007175 bool is_r6 = GetInstructionSetFeatures().IsR6();
Vladimir Marko65979462017-05-19 17:25:12 +01007176 Register base_reg = (invoke->HasPcRelativeMethodLoadKind() && !is_r6)
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007177 ? GetInvokeStaticOrDirectExtraParameter(invoke, temp.AsRegister<Register>())
7178 : ZERO;
7179
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007180 switch (method_load_kind) {
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01007181 case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007182 // temp = thread->string_init_entrypoint
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01007183 uint32_t offset =
7184 GetThreadOffset<kMipsPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007185 __ LoadFromOffset(kLoadWord,
7186 temp.AsRegister<Register>(),
7187 TR,
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01007188 offset);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007189 break;
Nicolas Geoffrayda079bb2016-09-26 17:56:07 +01007190 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007191 case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
Vladimir Markoc53c0792015-11-19 15:48:33 +00007192 callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007193 break;
Vladimir Marko65979462017-05-19 17:25:12 +01007194 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
7195 DCHECK(GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007196 PcRelativePatchInfo* info_high = NewPcRelativeMethodPatch(invoke->GetTargetMethod());
7197 PcRelativePatchInfo* info_low =
7198 NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
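      // The 0x5678 immediate below is a placeholder, as is the high half
      // emitted by EmitPcRelativeAddressPlaceholderHigh; the paired high/low
      // patch infos record where the %hi/%lo parts of the method's
      // PC-relative offset get written once the patches are resolved.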
Vladimir Marko65979462017-05-19 17:25:12 +01007199 bool reordering = __ SetReorder(false);
7200 Register temp_reg = temp.AsRegister<Register>();
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007201 EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base_reg, info_low);
Alexey Frunze6079dca2017-05-28 19:10:28 -07007202 __ Addiu(temp_reg, TMP, /* placeholder */ 0x5678);
Vladimir Marko65979462017-05-19 17:25:12 +01007203 __ SetReorder(reordering);
7204 break;
7205 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007206 case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
7207 __ LoadConst32(temp.AsRegister<Register>(), invoke->GetMethodAddress());
7208 break;
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007209 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007210 PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007211 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007212 PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
7213 MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007214 Register temp_reg = temp.AsRegister<Register>();
7215 bool reordering = __ SetReorder(false);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007216 EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base_reg, info_low);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007217 __ Lw(temp_reg, TMP, /* placeholder */ 0x5678);
7218 __ SetReorder(reordering);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007219 break;
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007220 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007221 case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
7222 GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
7223 return; // No code pointer retrieval; the runtime performs the call directly.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007224 }
7225 }
7226
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007227 switch (code_ptr_location) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007228 case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007229 __ Bal(&frame_entry_label_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007230 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007231 case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
7232 // T9 = callee_method->entry_point_from_quick_compiled_code_;
Goran Jakovljevic1a878372015-10-26 14:28:52 +01007233 __ LoadFromOffset(kLoadWord,
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007234 T9,
7235 callee_method.AsRegister<Register>(),
7236 ArtMethod::EntryPointFromQuickCompiledCodeOffset(
Andreas Gampe542451c2016-07-26 09:02:02 -07007237 kMipsPointerSize).Int32Value());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007238 // T9()
7239 __ Jalr(T9);
Alexey Frunze57eb0f52016-07-29 22:04:46 -07007240 __ NopIfNoReordering();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007241 break;
7242 }
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007243 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
7244
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007245 DCHECK(!IsLeafMethod());
7246}
7247
7248void InstructionCodeGeneratorMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00007249 // Explicit clinit checks triggered by static invokes must have been pruned by
7250 // art::PrepareForRegisterAllocation.
7251 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007252
7253 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7254 return;
7255 }
7256
7257 LocationSummary* locations = invoke->GetLocations();
7258 codegen_->GenerateStaticOrDirectCall(invoke,
7259 locations->HasTemps()
7260 ? locations->GetTemp(0)
7261 : Location::NoLocation());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007262}
7263
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007264void CodeGeneratorMIPS::GenerateVirtualCall(
7265 HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
Goran Jakovljevice919b072016-10-04 10:17:34 +02007266 // Use the calling convention instead of the location of the receiver, as
7267 // intrinsics may have put the receiver in a different register. In the intrinsics
7268 // slow path, the arguments have been moved to the right place, so here we are
7269 // guaranteed that the receiver is the first register of the calling convention.
7270 InvokeDexCallingConvention calling_convention;
7271 Register receiver = calling_convention.GetRegisterAt(0);
7272
Chris Larsen3acee732015-11-18 13:31:08 -08007273 Register temp = temp_location.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007274 size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
7275 invoke->GetVTableIndex(), kMipsPointerSize).SizeValue();
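  // The vtable is embedded at a fixed offset inside the Class object, so the
  // ArtMethod* can be loaded with a single word load from the receiver's
  // class once it is in `temp`.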
7276 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
Andreas Gampe542451c2016-07-26 09:02:02 -07007277 Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007278
7279 // temp = object->GetClass();
Goran Jakovljevice919b072016-10-04 10:17:34 +02007280 __ LoadFromOffset(kLoadWord, temp, receiver, class_offset);
Chris Larsen3acee732015-11-18 13:31:08 -08007281 MaybeRecordImplicitNullCheck(invoke);
Alexey Frunzec061de12017-02-14 13:27:23 -08007282 // Instead of simply (possibly) unpoisoning `temp` here, we should
7283 // emit a read barrier for the previous class reference load.
7284  // However, this is not required in practice, as this is an
7285 // intermediate/temporary reference and because the current
7286 // concurrent copying collector keeps the from-space memory
7287 // intact/accessible until the end of the marking phase (the
7288  // concurrent copying collector may not do so in the future).
7289 __ MaybeUnpoisonHeapReference(temp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007290 // temp = temp->GetMethodAt(method_offset);
7291 __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
7292 // T9 = temp->GetEntryPoint();
7293 __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
7294 // T9();
7295 __ Jalr(T9);
Alexey Frunze57eb0f52016-07-29 22:04:46 -07007296 __ NopIfNoReordering();
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007297 RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
Chris Larsen3acee732015-11-18 13:31:08 -08007298}
7299
7300void InstructionCodeGeneratorMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
7301 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7302 return;
7303 }
7304
7305 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007306 DCHECK(!codegen_->IsLeafMethod());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007307}
7308
7309void LocationsBuilderMIPS::VisitLoadClass(HLoadClass* cls) {
Vladimir Marko41559982017-01-06 14:04:23 +00007310 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007311 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007312 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07007313 Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
7314 CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007315 return;
7316 }
Vladimir Marko41559982017-01-06 14:04:23 +00007317 DCHECK(!cls->NeedsAccessCheck());
Alexey Frunzec61c0762017-04-10 13:54:23 -07007318 const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Alexey Frunze15958152017-02-09 19:08:30 -08007319 const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
7320 LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
Alexey Frunze06a46c42016-07-19 15:00:40 -07007321 ? LocationSummary::kCallOnSlowPath
7322 : LocationSummary::kNoCall;
7323 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
Alexey Frunzec61c0762017-04-10 13:54:23 -07007324 if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
7325 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
7326 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07007327 switch (load_kind) {
7328 // We need an extra register for PC-relative literals on R2.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007329 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007330 case HLoadClass::LoadKind::kBootImageAddress:
7331 case HLoadClass::LoadKind::kBssEntry:
Alexey Frunzec61c0762017-04-10 13:54:23 -07007332 if (isR6) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007333 break;
7334 }
7335 FALLTHROUGH_INTENDED;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007336 case HLoadClass::LoadKind::kReferrersClass:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007337 locations->SetInAt(0, Location::RequiresRegister());
7338 break;
7339 default:
7340 break;
7341 }
7342 locations->SetOut(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07007343 if (load_kind == HLoadClass::LoadKind::kBssEntry) {
7344 if (!kUseReadBarrier || kUseBakerReadBarrier) {
7345 // Rely on the type resolution or initialization and marking to save everything we need.
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007346 // Request a temp to hold the BSS entry location for the slow path.
7347 locations->AddTemp(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07007348 RegisterSet caller_saves = RegisterSet::Empty();
7349 InvokeRuntimeCallingConvention calling_convention;
7350 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7351 locations->SetCustomSlowPathCallerSaves(caller_saves);
7352 } else {
7353 // For non-Baker read barriers we have a temp-clobbering call.
7354 }
7355 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007356}
7357
Nicolas Geoffray5247c082017-01-13 14:17:29 +00007358// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
7359// move.
7360void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
Vladimir Marko41559982017-01-06 14:04:23 +00007361 HLoadClass::LoadKind load_kind = cls->GetLoadKind();
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007362 if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
Vladimir Marko41559982017-01-06 14:04:23 +00007363 codegen_->GenerateLoadClassRuntimeCall(cls);
Pavle Batutae87a7182015-10-28 13:10:42 +01007364 return;
7365 }
Vladimir Marko41559982017-01-06 14:04:23 +00007366 DCHECK(!cls->NeedsAccessCheck());
Pavle Batutae87a7182015-10-28 13:10:42 +01007367
Vladimir Marko41559982017-01-06 14:04:23 +00007368 LocationSummary* locations = cls->GetLocations();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007369 Location out_loc = locations->Out();
7370 Register out = out_loc.AsRegister<Register>();
7371 Register base_or_current_method_reg;
7372 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
7373 switch (load_kind) {
7374 // We need an extra register for PC-relative literals on R2.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007375 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007376 case HLoadClass::LoadKind::kBootImageAddress:
7377 case HLoadClass::LoadKind::kBssEntry:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007378 base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
7379 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007380 case HLoadClass::LoadKind::kReferrersClass:
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007381 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007382 base_or_current_method_reg = locations->InAt(0).AsRegister<Register>();
7383 break;
7384 default:
7385 base_or_current_method_reg = ZERO;
7386 break;
7387 }
Nicolas Geoffray42e372e2015-11-24 15:48:56 +00007388
Alexey Frunze15958152017-02-09 19:08:30 -08007389 const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
7390 ? kWithoutReadBarrier
7391 : kCompilerReadBarrierOption;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007392 bool generate_null_check = false;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007393 CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high = nullptr;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007394 switch (load_kind) {
7395 case HLoadClass::LoadKind::kReferrersClass: {
7396 DCHECK(!cls->CanCallRuntime());
7397 DCHECK(!cls->MustGenerateClinitCheck());
7398 // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
7399 GenerateGcRootFieldLoad(cls,
7400 out_loc,
7401 base_or_current_method_reg,
Alexey Frunze15958152017-02-09 19:08:30 -08007402 ArtMethod::DeclaringClassOffset().Int32Value(),
7403 read_barrier_option);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007404 break;
7405 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07007406 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007407 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze15958152017-02-09 19:08:30 -08007408 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007409 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Alexey Frunze06a46c42016-07-19 15:00:40 -07007410 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007411 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7412 codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007413 bool reordering = __ SetReorder(false);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007414 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
7415 out,
7416 base_or_current_method_reg,
7417 info_low);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007418 __ Addiu(out, out, /* placeholder */ 0x5678);
7419 __ SetReorder(reordering);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007420 break;
7421 }
7422 case HLoadClass::LoadKind::kBootImageAddress: {
Alexey Frunze15958152017-02-09 19:08:30 -08007423 DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Nicolas Geoffray5247c082017-01-13 14:17:29 +00007424 uint32_t address = dchecked_integral_cast<uint32_t>(
7425 reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
7426 DCHECK_NE(address, 0u);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007427 __ LoadLiteral(out,
7428 base_or_current_method_reg,
7429 codegen_->DeduplicateBootImageAddressLiteral(address));
7430 break;
7431 }
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007432 case HLoadClass::LoadKind::kBssEntry: {
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007433 bss_info_high = codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
7434 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7435 codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
Alexey Frunzec61c0762017-04-10 13:54:23 -07007436 constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
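      // A temp was reserved in the locations builder only for the Baker or
      // no-read-barrier configurations; with other read barriers the slow
      // path clobbers temps, so `out` is reused to hold the entry address.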
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007437 Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
7438 bool reordering = __ SetReorder(false);
7439 codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high,
7440 temp,
7441 base_or_current_method_reg,
7442 info_low);
7443 GenerateGcRootFieldLoad(cls, out_loc, temp, /* placeholder */ 0x5678, read_barrier_option);
7444 __ SetReorder(reordering);
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007445 generate_null_check = true;
7446 break;
7447 }
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007448 case HLoadClass::LoadKind::kJitTableAddress: {
Alexey Frunze627c1a02017-01-30 19:28:14 -08007449 CodeGeneratorMIPS::JitPatchInfo* info = codegen_->NewJitRootClassPatch(cls->GetDexFile(),
7450 cls->GetTypeIndex(),
7451 cls->GetClass());
7452 bool reordering = __ SetReorder(false);
7453 __ Bind(&info->high_label);
7454 __ Lui(out, /* placeholder */ 0x1234);
Alexey Frunze15958152017-02-09 19:08:30 -08007455 GenerateGcRootFieldLoad(cls, out_loc, out, /* placeholder */ 0x5678, read_barrier_option);
Alexey Frunze627c1a02017-01-30 19:28:14 -08007456 __ SetReorder(reordering);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007457 break;
7458 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007459 case HLoadClass::LoadKind::kRuntimeCall:
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007460 case HLoadClass::LoadKind::kInvalid:
Vladimir Marko41559982017-01-06 14:04:23 +00007461 LOG(FATAL) << "UNREACHABLE";
7462 UNREACHABLE();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007463 }
7464
7465 if (generate_null_check || cls->MustGenerateClinitCheck()) {
7466 DCHECK(cls->CanCallRuntime());
7467 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007468 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck(), bss_info_high);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007469 codegen_->AddSlowPath(slow_path);
7470 if (generate_null_check) {
7471 __ Beqz(out, slow_path->GetEntryLabel());
7472 }
7473 if (cls->MustGenerateClinitCheck()) {
7474 GenerateClassInitializationCheck(slow_path, out);
7475 } else {
7476 __ Bind(slow_path->GetExitLabel());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007477 }
7478 }
7479}
7480
7481static int32_t GetExceptionTlsOffset() {
Andreas Gampe542451c2016-07-26 09:02:02 -07007482 return Thread::ExceptionOffset<kMipsPointerSize>().Int32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007483}
7484
7485void LocationsBuilderMIPS::VisitLoadException(HLoadException* load) {
7486 LocationSummary* locations =
7487 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
7488 locations->SetOut(Location::RequiresRegister());
7489}
7490
7491void InstructionCodeGeneratorMIPS::VisitLoadException(HLoadException* load) {
7492 Register out = load->GetLocations()->Out().AsRegister<Register>();
7493 __ LoadFromOffset(kLoadWord, out, TR, GetExceptionTlsOffset());
7494}
7495
7496void LocationsBuilderMIPS::VisitClearException(HClearException* clear) {
7497 new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
7498}
7499
7500void InstructionCodeGeneratorMIPS::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
7501 __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
7502}
7503
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007504void LocationsBuilderMIPS::VisitLoadString(HLoadString* load) {
Alexey Frunzef63f5692016-12-13 17:43:11 -08007505 LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
Nicolas Geoffray917d0162015-11-24 18:25:35 +00007506 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007507 HLoadString::LoadKind load_kind = load->GetLoadKind();
Alexey Frunzec61c0762017-04-10 13:54:23 -07007508 const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007509 switch (load_kind) {
7510 // We need an extra register for PC-relative literals on R2.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007511 case HLoadString::LoadKind::kBootImageAddress:
7512 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoaad75c62016-10-03 08:46:48 +00007513 case HLoadString::LoadKind::kBssEntry:
Alexey Frunzec61c0762017-04-10 13:54:23 -07007514 if (isR6) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007515 break;
7516 }
7517 FALLTHROUGH_INTENDED;
7518 // We need an extra register for PC-relative dex cache accesses.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007519 case HLoadString::LoadKind::kRuntimeCall:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007520 locations->SetInAt(0, Location::RequiresRegister());
7521 break;
7522 default:
7523 break;
7524 }
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007525 if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
Alexey Frunzebb51df82016-11-01 16:07:32 -07007526 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07007527 locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
Alexey Frunzebb51df82016-11-01 16:07:32 -07007528 } else {
7529 locations->SetOut(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07007530 if (load_kind == HLoadString::LoadKind::kBssEntry) {
7531 if (!kUseReadBarrier || kUseBakerReadBarrier) {
7532 // Rely on the pResolveString and marking to save everything we need.
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007533 // Request a temp to hold the BSS entry location for the slow path.
7534 locations->AddTemp(Location::RequiresRegister());
Alexey Frunzec61c0762017-04-10 13:54:23 -07007535 RegisterSet caller_saves = RegisterSet::Empty();
7536 InvokeRuntimeCallingConvention calling_convention;
7537 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7538 locations->SetCustomSlowPathCallerSaves(caller_saves);
7539 } else {
7540 // For non-Baker read barriers we have a temp-clobbering call.
7541 }
7542 }
Alexey Frunzebb51df82016-11-01 16:07:32 -07007543 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007544}
7545
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007546// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
7547// move.
7548void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007549 HLoadString::LoadKind load_kind = load->GetLoadKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007550 LocationSummary* locations = load->GetLocations();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007551 Location out_loc = locations->Out();
7552 Register out = out_loc.AsRegister<Register>();
7553 Register base_or_current_method_reg;
7554 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
7555 switch (load_kind) {
7556 // We need an extra register for PC-relative literals on R2.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007557 case HLoadString::LoadKind::kBootImageAddress:
7558 case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Markoaad75c62016-10-03 08:46:48 +00007559 case HLoadString::LoadKind::kBssEntry:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007560 base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
7561 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007562 default:
7563 base_or_current_method_reg = ZERO;
7564 break;
7565 }
7566
7567 switch (load_kind) {
Alexey Frunze06a46c42016-07-19 15:00:40 -07007568 case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
Vladimir Markoaad75c62016-10-03 08:46:48 +00007569 DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007570 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007571 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007572 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7573 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007574 bool reordering = __ SetReorder(false);
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007575 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
7576 out,
7577 base_or_current_method_reg,
7578 info_low);
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007579 __ Addiu(out, out, /* placeholder */ 0x5678);
7580 __ SetReorder(reordering);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007581 return; // No dex cache slow path.
7582 }
7583 case HLoadString::LoadKind::kBootImageAddress: {
Nicolas Geoffrayf0acfe72017-01-09 20:54:52 +00007584 uint32_t address = dchecked_integral_cast<uint32_t>(
7585 reinterpret_cast<uintptr_t>(load->GetString().Get()));
7586 DCHECK_NE(address, 0u);
Alexey Frunze06a46c42016-07-19 15:00:40 -07007587 __ LoadLiteral(out,
7588 base_or_current_method_reg,
7589 codegen_->DeduplicateBootImageAddressLiteral(address));
7590 return; // No dex cache slow path.
7591 }
Vladimir Markoaad75c62016-10-03 08:46:48 +00007592 case HLoadString::LoadKind::kBssEntry: {
7593 DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007594 CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007595 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007596 CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
7597 codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
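      // The string's .bss slot starts out null; if the GC root load below
      // yields zero, the slow path calls pResolveString to resolve the
      // string and fill in the slot.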
Alexey Frunzec61c0762017-04-10 13:54:23 -07007598 constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
Alexey Frunze5fa5c042017-06-01 21:07:52 -07007599 Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
7600 bool reordering = __ SetReorder(false);
7601 codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
7602 temp,
7603 base_or_current_method_reg,
7604 info_low);
7605 GenerateGcRootFieldLoad(load,
7606 out_loc,
7607 temp,
7608 /* placeholder */ 0x5678,
7609 kCompilerReadBarrierOption);
7610 __ SetReorder(reordering);
7611 SlowPathCodeMIPS* slow_path =
7612 new (GetGraph()->GetArena()) LoadStringSlowPathMIPS(load, info_high);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007613 codegen_->AddSlowPath(slow_path);
7614 __ Beqz(out, slow_path->GetEntryLabel());
7615 __ Bind(slow_path->GetExitLabel());
7616 return;
7617 }
Alexey Frunze627c1a02017-01-30 19:28:14 -08007618 case HLoadString::LoadKind::kJitTableAddress: {
7619 CodeGeneratorMIPS::JitPatchInfo* info =
7620 codegen_->NewJitRootStringPatch(load->GetDexFile(),
7621 load->GetStringIndex(),
7622 load->GetString());
7623 bool reordering = __ SetReorder(false);
7624 __ Bind(&info->high_label);
7625 __ Lui(out, /* placeholder */ 0x1234);
Alexey Frunze15958152017-02-09 19:08:30 -08007626 GenerateGcRootFieldLoad(load,
7627 out_loc,
7628 out,
7629 /* placeholder */ 0x5678,
7630 kCompilerReadBarrierOption);
Alexey Frunze627c1a02017-01-30 19:28:14 -08007631 __ SetReorder(reordering);
7632 return;
7633 }
Alexey Frunze06a46c42016-07-19 15:00:40 -07007634 default:
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07007635 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007636 }
Nicolas Geoffray917d0162015-11-24 18:25:35 +00007637
Christina Wadsworthbf44e0e2016-08-18 10:37:42 -07007638 // TODO: Re-add the compiler code to do string dex cache lookup again.
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007639 DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007640 InvokeRuntimeCallingConvention calling_convention;
Alexey Frunzec61c0762017-04-10 13:54:23 -07007641 DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
Andreas Gampe8a0128a2016-11-28 07:38:35 -08007642 __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
Vladimir Markoaad75c62016-10-03 08:46:48 +00007643 codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
7644 CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007645}
7646
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007647void LocationsBuilderMIPS::VisitLongConstant(HLongConstant* constant) {
7648 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7649 locations->SetOut(Location::ConstantLocation(constant));
7650}
7651
7652void InstructionCodeGeneratorMIPS::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
7653 // Will be generated at use site.
7654}
7655
7656void LocationsBuilderMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
7657 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01007658 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007659 InvokeRuntimeCallingConvention calling_convention;
7660 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7661}
7662
7663void InstructionCodeGeneratorMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
7664 if (instruction->IsEnter()) {
Serban Constantinescufca16662016-07-14 09:21:59 +01007665 codegen_->InvokeRuntime(kQuickLockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007666 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7667 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01007668 codegen_->InvokeRuntime(kQuickUnlockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007669 }
7670 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
7671}
7672
7673void LocationsBuilderMIPS::VisitMul(HMul* mul) {
7674 LocationSummary* locations =
7675 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
7676 switch (mul->GetResultType()) {
7677 case Primitive::kPrimInt:
7678 case Primitive::kPrimLong:
7679 locations->SetInAt(0, Location::RequiresRegister());
7680 locations->SetInAt(1, Location::RequiresRegister());
7681 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7682 break;
7683
7684 case Primitive::kPrimFloat:
7685 case Primitive::kPrimDouble:
7686 locations->SetInAt(0, Location::RequiresFpuRegister());
7687 locations->SetInAt(1, Location::RequiresFpuRegister());
7688 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
7689 break;
7690
7691 default:
7692 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
7693 }
7694}
7695
7696void InstructionCodeGeneratorMIPS::VisitMul(HMul* instruction) {
7697 Primitive::Type type = instruction->GetType();
7698 LocationSummary* locations = instruction->GetLocations();
7699 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
7700
7701 switch (type) {
7702 case Primitive::kPrimInt: {
7703 Register dst = locations->Out().AsRegister<Register>();
7704 Register lhs = locations->InAt(0).AsRegister<Register>();
7705 Register rhs = locations->InAt(1).AsRegister<Register>();
7706
7707 if (isR6) {
7708 __ MulR6(dst, lhs, rhs);
7709 } else {
7710 __ MulR2(dst, lhs, rhs);
7711 }
7712 break;
7713 }
7714 case Primitive::kPrimLong: {
7715 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
7716 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
7717 Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
7718 Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
7719 Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
7720 Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();
7721
7722      // Extra checks to protect against errors caused by the existence of A1_A2.
7723      // The algorithm is wrong if dst_high is either lhs_low or rhs_low:
7724 // (e.g. lhs=a0_a1, rhs=a2_a3 and dst=a1_a2).
7725 DCHECK_NE(dst_high, lhs_low);
7726 DCHECK_NE(dst_high, rhs_low);
7727
7728 // A_B * C_D
7729 // dst_hi: [ low(A*D) + low(B*C) + hi(B*D) ]
7730 // dst_lo: [ low(B*D) ]
7731 // Note: R2 and R6 MUL produce the low 32 bit of the multiplication result.
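// For example, with lhs = 0x00000001_00000002 and rhs = 0x00000003_00000004:
//   dst_lo = low(2 * 4)                          = 0x00000008
//   dst_hi = low(1 * 4) + low(2 * 3) + hi(2 * 4) = 0x0000000A
// which matches the 64-bit truncation of the full product, 0x0000000A_00000008.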
7732
7733 if (isR6) {
7734 __ MulR6(TMP, lhs_high, rhs_low);
7735 __ MulR6(dst_high, lhs_low, rhs_high);
7736 __ Addu(dst_high, dst_high, TMP);
7737 __ MuhuR6(TMP, lhs_low, rhs_low);
7738 __ Addu(dst_high, dst_high, TMP);
7739 __ MulR6(dst_low, lhs_low, rhs_low);
7740 } else {
7741 __ MulR2(TMP, lhs_high, rhs_low);
7742 __ MulR2(dst_high, lhs_low, rhs_high);
7743 __ Addu(dst_high, dst_high, TMP);
7744 __ MultuR2(lhs_low, rhs_low);
7745 __ Mfhi(TMP);
7746 __ Addu(dst_high, dst_high, TMP);
7747 __ Mflo(dst_low);
7748 }
7749 break;
7750 }
7751 case Primitive::kPrimFloat:
7752 case Primitive::kPrimDouble: {
7753 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
7754 FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
7755 FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
7756 if (type == Primitive::kPrimFloat) {
7757 __ MulS(dst, lhs, rhs);
7758 } else {
7759 __ MulD(dst, lhs, rhs);
7760 }
7761 break;
7762 }
7763 default:
7764 LOG(FATAL) << "Unexpected mul type " << type;
7765 }
7766}
7767
7768void LocationsBuilderMIPS::VisitNeg(HNeg* neg) {
7769 LocationSummary* locations =
7770 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
7771 switch (neg->GetResultType()) {
7772 case Primitive::kPrimInt:
7773 case Primitive::kPrimLong:
7774 locations->SetInAt(0, Location::RequiresRegister());
7775 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7776 break;
7777
7778 case Primitive::kPrimFloat:
7779 case Primitive::kPrimDouble:
7780 locations->SetInAt(0, Location::RequiresFpuRegister());
7781 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
7782 break;
7783
7784 default:
7785 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
7786 }
7787}
7788
7789void InstructionCodeGeneratorMIPS::VisitNeg(HNeg* instruction) {
7790 Primitive::Type type = instruction->GetType();
7791 LocationSummary* locations = instruction->GetLocations();
7792
7793 switch (type) {
7794 case Primitive::kPrimInt: {
7795 Register dst = locations->Out().AsRegister<Register>();
7796 Register src = locations->InAt(0).AsRegister<Register>();
7797 __ Subu(dst, ZERO, src);
7798 break;
7799 }
7800 case Primitive::kPrimLong: {
7801 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
7802 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
7803 Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
7804 Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
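// 64-bit negation in 32-bit halves: negate the low word, then negate the high word
// and subtract a borrow that is 1 whenever the low word is non-zero.
// E.g. src = 0x00000000_00000001 gives dst = 0xFFFFFFFF_FFFFFFFF (-1).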
7805 __ Subu(dst_low, ZERO, src_low);
7806 __ Sltu(TMP, ZERO, dst_low);
7807 __ Subu(dst_high, ZERO, src_high);
7808 __ Subu(dst_high, dst_high, TMP);
7809 break;
7810 }
7811 case Primitive::kPrimFloat:
7812 case Primitive::kPrimDouble: {
7813 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
7814 FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
7815 if (type == Primitive::kPrimFloat) {
7816 __ NegS(dst, src);
7817 } else {
7818 __ NegD(dst, src);
7819 }
7820 break;
7821 }
7822 default:
7823 LOG(FATAL) << "Unexpected neg type " << type;
7824 }
7825}
7826
7827void LocationsBuilderMIPS::VisitNewArray(HNewArray* instruction) {
7828 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01007829 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007830 InvokeRuntimeCallingConvention calling_convention;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007831 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00007832 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
7833 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007834}
7835
7836void InstructionCodeGeneratorMIPS::VisitNewArray(HNewArray* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08007837 // Note: if heap poisoning is enabled, the entry point takes care
7838 // of poisoning the reference.
Nicolas Geoffraye761bcc2017-01-19 08:59:37 +00007839 codegen_->InvokeRuntime(kQuickAllocArrayResolved, instruction, instruction->GetDexPc());
7840 CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007841}
7842
7843void LocationsBuilderMIPS::VisitNewInstance(HNewInstance* instruction) {
7844 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01007845 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007846 InvokeRuntimeCallingConvention calling_convention;
David Brazdil6de19382016-01-08 17:37:10 +00007847 if (instruction->IsStringAlloc()) {
7848 locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
7849 } else {
7850 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
David Brazdil6de19382016-01-08 17:37:10 +00007851 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007852 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
7853}
7854
7855void InstructionCodeGeneratorMIPS::VisitNewInstance(HNewInstance* instruction) {
Alexey Frunzec061de12017-02-14 13:27:23 -08007856 // Note: if heap poisoning is enabled, the entry point takes care
7857 // of poisoning the reference.
David Brazdil6de19382016-01-08 17:37:10 +00007858 if (instruction->IsStringAlloc()) {
7859 // String is allocated through StringFactory. Call NewEmptyString entry point.
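// The entry point address is read from the current thread (TR) rather than resolved
// statically, and the call goes through T9, the register in which the MIPS PIC calling
// convention expects the callee's address.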
7860 Register temp = instruction->GetLocations()->GetTemp(0).AsRegister<Register>();
Andreas Gampe542451c2016-07-26 09:02:02 -07007861 MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
David Brazdil6de19382016-01-08 17:37:10 +00007862 __ LoadFromOffset(kLoadWord, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
7863 __ LoadFromOffset(kLoadWord, T9, temp, code_offset.Int32Value());
7864 __ Jalr(T9);
Alexey Frunze57eb0f52016-07-29 22:04:46 -07007865 __ NopIfNoReordering();
David Brazdil6de19382016-01-08 17:37:10 +00007866 codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
7867 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01007868 codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
Nicolas Geoffray0d3998b2017-01-12 15:35:12 +00007869 CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
David Brazdil6de19382016-01-08 17:37:10 +00007870 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007871}
7872
7873void LocationsBuilderMIPS::VisitNot(HNot* instruction) {
7874 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
7875 locations->SetInAt(0, Location::RequiresRegister());
7876 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7877}
7878
7879void InstructionCodeGeneratorMIPS::VisitNot(HNot* instruction) {
7880 Primitive::Type type = instruction->GetType();
7881 LocationSummary* locations = instruction->GetLocations();
7882
7883 switch (type) {
7884 case Primitive::kPrimInt: {
7885 Register dst = locations->Out().AsRegister<Register>();
7886 Register src = locations->InAt(0).AsRegister<Register>();
7887 __ Nor(dst, src, ZERO);
7888 break;
7889 }
7890
7891 case Primitive::kPrimLong: {
7892 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
7893 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
7894 Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
7895 Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
7896 __ Nor(dst_high, src_high, ZERO);
7897 __ Nor(dst_low, src_low, ZERO);
7898 break;
7899 }
7900
7901 default:
7902 LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
7903 }
7904}
7905
7906void LocationsBuilderMIPS::VisitBooleanNot(HBooleanNot* instruction) {
7907 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
7908 locations->SetInAt(0, Location::RequiresRegister());
7909 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
7910}
7911
7912void InstructionCodeGeneratorMIPS::VisitBooleanNot(HBooleanNot* instruction) {
7913 LocationSummary* locations = instruction->GetLocations();
7914 __ Xori(locations->Out().AsRegister<Register>(),
7915 locations->InAt(0).AsRegister<Register>(),
7916 1);
7917}
7918
7919void LocationsBuilderMIPS::VisitNullCheck(HNullCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01007920 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
7921 locations->SetInAt(0, Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007922}
7923
Calin Juravle2ae48182016-03-16 14:05:09 +00007924void CodeGeneratorMIPS::GenerateImplicitNullCheck(HNullCheck* instruction) {
7925 if (CanMoveNullCheckToUser(instruction)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007926 return;
7927 }
7928 Location obj = instruction->GetLocations()->InAt(0);
7929
7930 __ Lw(ZERO, obj.AsRegister<Register>(), 0);
Calin Juravle2ae48182016-03-16 14:05:09 +00007931 RecordPcInfo(instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007932}
7933
Calin Juravle2ae48182016-03-16 14:05:09 +00007934void CodeGeneratorMIPS::GenerateExplicitNullCheck(HNullCheck* instruction) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007935 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS(instruction);
Calin Juravle2ae48182016-03-16 14:05:09 +00007936 AddSlowPath(slow_path);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007937
7938 Location obj = instruction->GetLocations()->InAt(0);
7939
7940 __ Beqz(obj.AsRegister<Register>(), slow_path->GetEntryLabel());
7941}
7942
7943void InstructionCodeGeneratorMIPS::VisitNullCheck(HNullCheck* instruction) {
Calin Juravle2ae48182016-03-16 14:05:09 +00007944 codegen_->GenerateNullCheck(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007945}
7946
7947void LocationsBuilderMIPS::VisitOr(HOr* instruction) {
7948 HandleBinaryOp(instruction);
7949}
7950
7951void InstructionCodeGeneratorMIPS::VisitOr(HOr* instruction) {
7952 HandleBinaryOp(instruction);
7953}
7954
7955void LocationsBuilderMIPS::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
7956 LOG(FATAL) << "Unreachable";
7957}
7958
7959void InstructionCodeGeneratorMIPS::VisitParallelMove(HParallelMove* instruction) {
7960 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
7961}
7962
7963void LocationsBuilderMIPS::VisitParameterValue(HParameterValue* instruction) {
7964 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
7965 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
7966 if (location.IsStackSlot()) {
7967 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
7968 } else if (location.IsDoubleStackSlot()) {
7969 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
7970 }
7971 locations->SetOut(location);
7972}
7973
7974void InstructionCodeGeneratorMIPS::VisitParameterValue(HParameterValue* instruction
7975 ATTRIBUTE_UNUSED) {
7976 // Nothing to do, the parameter is already at its location.
7977}
7978
7979void LocationsBuilderMIPS::VisitCurrentMethod(HCurrentMethod* instruction) {
7980 LocationSummary* locations =
7981 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
7982 locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
7983}
7984
7985void InstructionCodeGeneratorMIPS::VisitCurrentMethod(HCurrentMethod* instruction
7986 ATTRIBUTE_UNUSED) {
7987 // Nothing to do, the method is already at its location.
7988}
7989
7990void LocationsBuilderMIPS::VisitPhi(HPhi* instruction) {
7991 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01007992 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007993 locations->SetInAt(i, Location::Any());
7994 }
7995 locations->SetOut(Location::Any());
7996}
7997
7998void InstructionCodeGeneratorMIPS::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
7999 LOG(FATAL) << "Unreachable";
8000}
8001
8002void LocationsBuilderMIPS::VisitRem(HRem* rem) {
8003 Primitive::Type type = rem->GetResultType();
8004 LocationSummary::CallKind call_kind =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008005 (type == Primitive::kPrimInt) ? LocationSummary::kNoCall : LocationSummary::kCallOnMainOnly;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008006 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
8007
8008 switch (type) {
8009 case Primitive::kPrimInt:
8010 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze7e99e052015-11-24 19:28:01 -08008011 locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008012 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8013 break;
8014
8015 case Primitive::kPrimLong: {
8016 InvokeRuntimeCallingConvention calling_convention;
8017 locations->SetInAt(0, Location::RegisterPairLocation(
8018 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
8019 locations->SetInAt(1, Location::RegisterPairLocation(
8020 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
8021 locations->SetOut(calling_convention.GetReturnLocation(type));
8022 break;
8023 }
8024
8025 case Primitive::kPrimFloat:
8026 case Primitive::kPrimDouble: {
8027 InvokeRuntimeCallingConvention calling_convention;
8028 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
8029 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
8030 locations->SetOut(calling_convention.GetReturnLocation(type));
8031 break;
8032 }
8033
8034 default:
8035 LOG(FATAL) << "Unexpected rem type " << type;
8036 }
8037}
8038
8039void InstructionCodeGeneratorMIPS::VisitRem(HRem* instruction) {
8040 Primitive::Type type = instruction->GetType();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008041
8042 switch (type) {
Alexey Frunze7e99e052015-11-24 19:28:01 -08008043 case Primitive::kPrimInt:
8044 GenerateDivRemIntegral(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008045 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008046 case Primitive::kPrimLong: {
Serban Constantinescufca16662016-07-14 09:21:59 +01008047 codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008048 CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
8049 break;
8050 }
8051 case Primitive::kPrimFloat: {
Serban Constantinescufca16662016-07-14 09:21:59 +01008052 codegen_->InvokeRuntime(kQuickFmodf, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00008053 CheckEntrypointTypes<kQuickFmodf, float, float, float>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008054 break;
8055 }
8056 case Primitive::kPrimDouble: {
Serban Constantinescufca16662016-07-14 09:21:59 +01008057 codegen_->InvokeRuntime(kQuickFmod, instruction, instruction->GetDexPc());
Roland Levillain888d0672015-11-23 18:53:50 +00008058 CheckEntrypointTypes<kQuickFmod, double, double, double>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008059 break;
8060 }
8061 default:
8062 LOG(FATAL) << "Unexpected rem type " << type;
8063 }
8064}
8065
Igor Murashkind01745e2017-04-05 16:40:31 -07008066void LocationsBuilderMIPS::VisitConstructorFence(HConstructorFence* constructor_fence) {
8067 constructor_fence->SetLocations(nullptr);
8068}
8069
8070void InstructionCodeGeneratorMIPS::VisitConstructorFence(
8071 HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
8072 GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
8073}
8074
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008075void LocationsBuilderMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
8076 memory_barrier->SetLocations(nullptr);
8077}
8078
8079void InstructionCodeGeneratorMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
8080 GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
8081}
8082
8083void LocationsBuilderMIPS::VisitReturn(HReturn* ret) {
8084 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
8085 Primitive::Type return_type = ret->InputAt(0)->GetType();
8086 locations->SetInAt(0, MipsReturnLocation(return_type));
8087}
8088
8089void InstructionCodeGeneratorMIPS::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
8090 codegen_->GenerateFrameExit();
8091}
8092
8093void LocationsBuilderMIPS::VisitReturnVoid(HReturnVoid* ret) {
8094 ret->SetLocations(nullptr);
8095}
8096
8097void InstructionCodeGeneratorMIPS::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
8098 codegen_->GenerateFrameExit();
8099}
8100
Alexey Frunze92d90602015-12-18 18:16:36 -08008101void LocationsBuilderMIPS::VisitRor(HRor* ror) {
8102 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00008103}
8104
Alexey Frunze92d90602015-12-18 18:16:36 -08008105void InstructionCodeGeneratorMIPS::VisitRor(HRor* ror) {
8106 HandleShift(ror);
Scott Wakeling40a04bf2015-12-11 09:50:36 +00008107}
8108
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008109void LocationsBuilderMIPS::VisitShl(HShl* shl) {
8110 HandleShift(shl);
8111}
8112
8113void InstructionCodeGeneratorMIPS::VisitShl(HShl* shl) {
8114 HandleShift(shl);
8115}
8116
8117void LocationsBuilderMIPS::VisitShr(HShr* shr) {
8118 HandleShift(shr);
8119}
8120
8121void InstructionCodeGeneratorMIPS::VisitShr(HShr* shr) {
8122 HandleShift(shr);
8123}
8124
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008125void LocationsBuilderMIPS::VisitSub(HSub* instruction) {
8126 HandleBinaryOp(instruction);
8127}
8128
8129void InstructionCodeGeneratorMIPS::VisitSub(HSub* instruction) {
8130 HandleBinaryOp(instruction);
8131}
8132
8133void LocationsBuilderMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
8134 HandleFieldGet(instruction, instruction->GetFieldInfo());
8135}
8136
8137void InstructionCodeGeneratorMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
8138 HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
8139}
8140
8141void LocationsBuilderMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
8142 HandleFieldSet(instruction, instruction->GetFieldInfo());
8143}
8144
8145void InstructionCodeGeneratorMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
Goran Jakovljevice114da22016-12-26 14:21:43 +01008146 HandleFieldSet(instruction,
8147 instruction->GetFieldInfo(),
8148 instruction->GetDexPc(),
8149 instruction->GetValueCanBeNull());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008150}
8151
8152void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldGet(
8153 HUnresolvedInstanceFieldGet* instruction) {
8154 FieldAccessCallingConventionMIPS calling_convention;
8155 codegen_->CreateUnresolvedFieldLocationSummary(instruction,
8156 instruction->GetFieldType(),
8157 calling_convention);
8158}
8159
8160void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldGet(
8161 HUnresolvedInstanceFieldGet* instruction) {
8162 FieldAccessCallingConventionMIPS calling_convention;
8163 codegen_->GenerateUnresolvedFieldAccess(instruction,
8164 instruction->GetFieldType(),
8165 instruction->GetFieldIndex(),
8166 instruction->GetDexPc(),
8167 calling_convention);
8168}
8169
8170void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldSet(
8171 HUnresolvedInstanceFieldSet* instruction) {
8172 FieldAccessCallingConventionMIPS calling_convention;
8173 codegen_->CreateUnresolvedFieldLocationSummary(instruction,
8174 instruction->GetFieldType(),
8175 calling_convention);
8176}
8177
8178void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldSet(
8179 HUnresolvedInstanceFieldSet* instruction) {
8180 FieldAccessCallingConventionMIPS calling_convention;
8181 codegen_->GenerateUnresolvedFieldAccess(instruction,
8182 instruction->GetFieldType(),
8183 instruction->GetFieldIndex(),
8184 instruction->GetDexPc(),
8185 calling_convention);
8186}
8187
8188void LocationsBuilderMIPS::VisitUnresolvedStaticFieldGet(
8189 HUnresolvedStaticFieldGet* instruction) {
8190 FieldAccessCallingConventionMIPS calling_convention;
8191 codegen_->CreateUnresolvedFieldLocationSummary(instruction,
8192 instruction->GetFieldType(),
8193 calling_convention);
8194}
8195
8196void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldGet(
8197 HUnresolvedStaticFieldGet* instruction) {
8198 FieldAccessCallingConventionMIPS calling_convention;
8199 codegen_->GenerateUnresolvedFieldAccess(instruction,
8200 instruction->GetFieldType(),
8201 instruction->GetFieldIndex(),
8202 instruction->GetDexPc(),
8203 calling_convention);
8204}
8205
8206void LocationsBuilderMIPS::VisitUnresolvedStaticFieldSet(
8207 HUnresolvedStaticFieldSet* instruction) {
8208 FieldAccessCallingConventionMIPS calling_convention;
8209 codegen_->CreateUnresolvedFieldLocationSummary(instruction,
8210 instruction->GetFieldType(),
8211 calling_convention);
8212}
8213
8214void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldSet(
8215 HUnresolvedStaticFieldSet* instruction) {
8216 FieldAccessCallingConventionMIPS calling_convention;
8217 codegen_->GenerateUnresolvedFieldAccess(instruction,
8218 instruction->GetFieldType(),
8219 instruction->GetFieldIndex(),
8220 instruction->GetDexPc(),
8221 calling_convention);
8222}
8223
8224void LocationsBuilderMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
Vladimir Marko70e97462016-08-09 11:04:26 +01008225 LocationSummary* locations =
8226 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
Vladimir Marko804b03f2016-09-14 16:26:36 +01008227 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008228}
8229
8230void InstructionCodeGeneratorMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
8231 HBasicBlock* block = instruction->GetBlock();
8232 if (block->GetLoopInformation() != nullptr) {
8233 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
8234 // The back edge will generate the suspend check.
8235 return;
8236 }
8237 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
8238 // The goto will generate the suspend check.
8239 return;
8240 }
8241 GenerateSuspendCheck(instruction, nullptr);
8242}
8243
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008244void LocationsBuilderMIPS::VisitThrow(HThrow* instruction) {
8245 LocationSummary* locations =
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008246 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008247 InvokeRuntimeCallingConvention calling_convention;
8248 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
8249}
8250
8251void InstructionCodeGeneratorMIPS::VisitThrow(HThrow* instruction) {
Serban Constantinescufca16662016-07-14 09:21:59 +01008252 codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008253 CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
8254}
8255
8256void LocationsBuilderMIPS::VisitTypeConversion(HTypeConversion* conversion) {
8257 Primitive::Type input_type = conversion->GetInputType();
8258 Primitive::Type result_type = conversion->GetResultType();
8259 DCHECK_NE(input_type, result_type);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008260 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008261
8262 if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
8263 (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
8264 LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
8265 }
8266
8267 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008268 if (!isR6 &&
8269 ((Primitive::IsFloatingPointType(result_type) && input_type == Primitive::kPrimLong) ||
8270 (result_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(input_type)))) {
Serban Constantinescu54ff4822016-07-07 18:03:19 +01008271 call_kind = LocationSummary::kCallOnMainOnly;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008272 }
8273
8274 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);
8275
8276 if (call_kind == LocationSummary::kNoCall) {
8277 if (Primitive::IsFloatingPointType(input_type)) {
8278 locations->SetInAt(0, Location::RequiresFpuRegister());
8279 } else {
8280 locations->SetInAt(0, Location::RequiresRegister());
8281 }
8282
8283 if (Primitive::IsFloatingPointType(result_type)) {
8284 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
8285 } else {
8286 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8287 }
8288 } else {
8289 InvokeRuntimeCallingConvention calling_convention;
8290
8291 if (Primitive::IsFloatingPointType(input_type)) {
8292 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
8293 } else {
8294 DCHECK_EQ(input_type, Primitive::kPrimLong);
8295 locations->SetInAt(0, Location::RegisterPairLocation(
8296 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
8297 }
8298
8299 locations->SetOut(calling_convention.GetReturnLocation(result_type));
8300 }
8301}
8302
8303void InstructionCodeGeneratorMIPS::VisitTypeConversion(HTypeConversion* conversion) {
8304 LocationSummary* locations = conversion->GetLocations();
8305 Primitive::Type result_type = conversion->GetResultType();
8306 Primitive::Type input_type = conversion->GetInputType();
8307 bool has_sign_extension = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008308 bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008309
8310 DCHECK_NE(input_type, result_type);
8311
8312 if (result_type == Primitive::kPrimLong && Primitive::IsIntegralType(input_type)) {
8313 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
8314 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
8315 Register src = locations->InAt(0).AsRegister<Register>();
8316
Alexey Frunzea871ef12016-06-27 15:20:11 -07008317 if (dst_low != src) {
8318 __ Move(dst_low, src);
8319 }
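// The high word is the source's sign bit replicated 32 times, obtained with an
// arithmetic shift right by 31 (e.g. src = -5 gives dst_high = 0xFFFFFFFF,
// src = 5 gives dst_high = 0x00000000).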
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008320 __ Sra(dst_high, src, 31);
8321 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
8322 Register dst = locations->Out().AsRegister<Register>();
8323 Register src = (input_type == Primitive::kPrimLong)
8324 ? locations->InAt(0).AsRegisterPairLow<Register>()
8325 : locations->InAt(0).AsRegister<Register>();
8326
8327 switch (result_type) {
8328 case Primitive::kPrimChar:
8329 __ Andi(dst, src, 0xFFFF);
8330 break;
8331 case Primitive::kPrimByte:
8332 if (has_sign_extension) {
8333 __ Seb(dst, src);
8334 } else {
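// Without SEB (pre-R2), shifting left by 24 moves the byte's sign bit into
// bit 31, and the arithmetic shift back replicates it across the upper bits.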
8335 __ Sll(dst, src, 24);
8336 __ Sra(dst, dst, 24);
8337 }
8338 break;
8339 case Primitive::kPrimShort:
8340 if (has_sign_extension) {
8341 __ Seh(dst, src);
8342 } else {
8343 __ Sll(dst, src, 16);
8344 __ Sra(dst, dst, 16);
8345 }
8346 break;
8347 case Primitive::kPrimInt:
Alexey Frunzea871ef12016-06-27 15:20:11 -07008348 if (dst != src) {
8349 __ Move(dst, src);
8350 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008351 break;
8352
8353 default:
8354 LOG(FATAL) << "Unexpected type conversion from " << input_type
8355 << " to " << result_type;
8356 }
8357 } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008358 if (input_type == Primitive::kPrimLong) {
8359 if (isR6) {
8360 // cvt.s.l/cvt.d.l requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
8361 // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
8362 Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
8363 Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
8364 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
8365 __ Mtc1(src_low, FTMP);
8366 __ Mthc1(src_high, FTMP);
8367 if (result_type == Primitive::kPrimFloat) {
8368 __ Cvtsl(dst, FTMP);
8369 } else {
8370 __ Cvtdl(dst, FTMP);
8371 }
8372 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01008373 QuickEntrypointEnum entrypoint = (result_type == Primitive::kPrimFloat) ? kQuickL2f
8374 : kQuickL2d;
8375 codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008376 if (result_type == Primitive::kPrimFloat) {
8377 CheckEntrypointTypes<kQuickL2f, float, int64_t>();
8378 } else {
8379 CheckEntrypointTypes<kQuickL2d, double, int64_t>();
8380 }
8381 }
8382 } else {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008383 Register src = locations->InAt(0).AsRegister<Register>();
8384 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
8385 __ Mtc1(src, FTMP);
8386 if (result_type == Primitive::kPrimFloat) {
8387 __ Cvtsw(dst, FTMP);
8388 } else {
8389 __ Cvtdw(dst, FTMP);
8390 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008391 }
8392 } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
8393 CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);
Lena Djokicf4e23a82017-05-09 15:43:45 +02008394
8395 // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
8396 // value of the output type if the input is outside of the range after the truncation or
8397 // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
8398 // results. This matches the desired float/double-to-int/long conversion exactly.
8399 //
8400 // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
8401 // value when the input is either a NaN or is outside of the range of the output type
8402 // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
8403 // the same result.
8404 //
8405 // The code takes care of the different behaviors by first comparing the input to the
8406 // minimum output value (-2**63 for truncating to long, -2**31 for truncating to int).
8407 // If the input is greater than or equal to the minimum, it proceeds to the truncate
8408 // instruction, which will handle such an input the same way irrespective of NAN2008.
8409 // Otherwise the input is compared to itself to determine whether it is a NaN or not
8410 // in order to return either zero or the minimum value.
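// For example, when converting float to int: a NaN input yields 0, an input of -1e20f
// (below the minimum) yields INT_MIN, and an in-range input such as 12.9f is truncated
// to 12, either by the capped truncate instruction on R6 or by the explicit comparisons
// in the R2 path below.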
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008411 if (result_type == Primitive::kPrimLong) {
8412 if (isR6) {
8413 // trunc.l.s/trunc.l.d requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
8414 // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
8415 FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
8416 Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
8417 Register dst_low = locations->Out().AsRegisterPairLow<Register>();
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008418
8419 if (input_type == Primitive::kPrimFloat) {
8420 __ TruncLS(FTMP, src);
8421 } else {
8422 __ TruncLD(FTMP, src);
8423 }
8424 __ Mfc1(dst_low, FTMP);
8425 __ Mfhc1(dst_high, FTMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008426 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01008427 QuickEntrypointEnum entrypoint = (input_type == Primitive::kPrimFloat) ? kQuickF2l
8428 : kQuickD2l;
8429 codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008430 if (input_type == Primitive::kPrimFloat) {
8431 CheckEntrypointTypes<kQuickF2l, int64_t, float>();
8432 } else {
8433 CheckEntrypointTypes<kQuickD2l, int64_t, double>();
8434 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008435 }
8436 } else {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008437 FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
8438 Register dst = locations->Out().AsRegister<Register>();
8439 MipsLabel truncate;
8440 MipsLabel done;
8441
Lena Djokicf4e23a82017-05-09 15:43:45 +02008442 if (!isR6) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008443 if (input_type == Primitive::kPrimFloat) {
Lena Djokicf4e23a82017-05-09 15:43:45 +02008444 uint32_t min_val = bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
8445 __ LoadConst32(TMP, min_val);
8446 __ Mtc1(TMP, FTMP);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008447 } else {
Lena Djokicf4e23a82017-05-09 15:43:45 +02008448 uint64_t min_val = bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
8449 __ LoadConst32(TMP, High32Bits(min_val));
8450 __ Mtc1(ZERO, FTMP);
8451 __ MoveToFpuHigh(TMP, FTMP);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008452 }
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008453
8454 if (input_type == Primitive::kPrimFloat) {
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008455 __ ColeS(0, FTMP, src);
8456 } else {
8457 __ ColeD(0, FTMP, src);
8458 }
8459 __ Bc1t(0, &truncate);
8460
8461 if (input_type == Primitive::kPrimFloat) {
8462 __ CeqS(0, src, src);
8463 } else {
8464 __ CeqD(0, src, src);
8465 }
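// The self-comparison above sets FP condition flag 0 only for non-NaN inputs;
// Movf then overwrites the preloaded minimum with zero exactly when the flag is
// clear, i.e. when the input was a NaN.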
8466 __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
8467 __ Movf(dst, ZERO, 0);
Lena Djokicf4e23a82017-05-09 15:43:45 +02008468
8469 __ B(&done);
8470
8471 __ Bind(&truncate);
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008472 }
8473
Alexey Frunzebaf60b72015-12-22 15:15:03 -08008474 if (input_type == Primitive::kPrimFloat) {
8475 __ TruncWS(FTMP, src);
8476 } else {
8477 __ TruncWD(FTMP, src);
8478 }
8479 __ Mfc1(dst, FTMP);
8480
Lena Djokicf4e23a82017-05-09 15:43:45 +02008481 if (!isR6) {
8482 __ Bind(&done);
8483 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008484 }
8485 } else if (Primitive::IsFloatingPointType(result_type) &&
8486 Primitive::IsFloatingPointType(input_type)) {
8487 FRegister dst = locations->Out().AsFpuRegister<FRegister>();
8488 FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
8489 if (result_type == Primitive::kPrimFloat) {
8490 __ Cvtsd(dst, src);
8491 } else {
8492 __ Cvtds(dst, src);
8493 }
8494 } else {
8495 LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
8496 << " to " << result_type;
8497 }
8498}
8499
8500void LocationsBuilderMIPS::VisitUShr(HUShr* ushr) {
8501 HandleShift(ushr);
8502}
8503
8504void InstructionCodeGeneratorMIPS::VisitUShr(HUShr* ushr) {
8505 HandleShift(ushr);
8506}
8507
8508void LocationsBuilderMIPS::VisitXor(HXor* instruction) {
8509 HandleBinaryOp(instruction);
8510}
8511
8512void InstructionCodeGeneratorMIPS::VisitXor(HXor* instruction) {
8513 HandleBinaryOp(instruction);
8514}
8515
8516void LocationsBuilderMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
8517 // Nothing to do, this should be removed during prepare for register allocator.
8518 LOG(FATAL) << "Unreachable";
8519}
8520
8521void InstructionCodeGeneratorMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
8522 // Nothing to do, this should be removed during prepare for register allocator.
8523 LOG(FATAL) << "Unreachable";
8524}
8525
8526void LocationsBuilderMIPS::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008527 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008528}
8529
8530void InstructionCodeGeneratorMIPS::VisitEqual(HEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008531 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008532}
8533
8534void LocationsBuilderMIPS::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008535 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008536}
8537
8538void InstructionCodeGeneratorMIPS::VisitNotEqual(HNotEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008539 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008540}
8541
8542void LocationsBuilderMIPS::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008543 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008544}
8545
8546void InstructionCodeGeneratorMIPS::VisitLessThan(HLessThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008547 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008548}
8549
8550void LocationsBuilderMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008551 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008552}
8553
8554void InstructionCodeGeneratorMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008555 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008556}
8557
8558void LocationsBuilderMIPS::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008559 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008560}
8561
8562void InstructionCodeGeneratorMIPS::VisitGreaterThan(HGreaterThan* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008563 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008564}
8565
8566void LocationsBuilderMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008567 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008568}
8569
8570void InstructionCodeGeneratorMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008571 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008572}
8573
8574void LocationsBuilderMIPS::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008575 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008576}
8577
8578void InstructionCodeGeneratorMIPS::VisitBelow(HBelow* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008579 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008580}
8581
8582void LocationsBuilderMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008583 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008584}
8585
8586void InstructionCodeGeneratorMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008587 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008588}
8589
8590void LocationsBuilderMIPS::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008591 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008592}
8593
8594void InstructionCodeGeneratorMIPS::VisitAbove(HAbove* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008595 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008596}
8597
8598void LocationsBuilderMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008599 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008600}
8601
8602void InstructionCodeGeneratorMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00008603 HandleCondition(comp);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008604}
8605
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008606void LocationsBuilderMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
8607 LocationSummary* locations =
8608 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
8609 locations->SetInAt(0, Location::RequiresRegister());
8610}
8611
Alexey Frunze96b66822016-09-10 02:32:44 -07008612void InstructionCodeGeneratorMIPS::GenPackedSwitchWithCompares(Register value_reg,
8613 int32_t lower_bound,
8614 uint32_t num_entries,
8615 HBasicBlock* switch_block,
8616 HBasicBlock* default_block) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008617 // Create a set of compare/jumps.
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008618 Register temp_reg = TMP;
8619 __ Addiu32(temp_reg, value_reg, -lower_bound);
8620 // Jump to the default block if the index is negative.
8621 // Note: We don't check the case where the index is positive while value < lower_bound, because in
8622 // that case index >= num_entries must be true; this saves one branch instruction.
8623 __ Bltz(temp_reg, codegen_->GetLabelOf(default_block));
8624
Alexey Frunze96b66822016-09-10 02:32:44 -07008625 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008626 // Jump to successors[0] if value == lower_bound.
8627 __ Beqz(temp_reg, codegen_->GetLabelOf(successors[0]));
8628 int32_t last_index = 0;
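// The loop below peels off two case values per iteration: after subtracting 2, a negative
// result selects successors[last_index + 1] and a zero result selects successors[last_index + 2].
// E.g. with lower_bound = 0 and num_entries = 4, an input of 3 falls through the first two
// checks, the single loop iteration leaves temp_reg at 1, and the trailing Addiu/Beqz pair
// dispatches to successors[3].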
8629 for (; num_entries - last_index > 2; last_index += 2) {
8630 __ Addiu(temp_reg, temp_reg, -2);
8631 // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
8632 __ Bltz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
8633 // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
8634 __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
8635 }
8636 if (num_entries - last_index == 2) {
8637 // The last missing case_value.
8638 __ Addiu(temp_reg, temp_reg, -1);
8639 __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008640 }
8641
Vladimir Markof3e0ee22015-12-17 15:23:13 +00008642 // And the default for any other value.
Alexey Frunze96b66822016-09-10 02:32:44 -07008643 if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008644 __ B(codegen_->GetLabelOf(default_block));
8645 }
8646}
8647
Alexey Frunze96b66822016-09-10 02:32:44 -07008648void InstructionCodeGeneratorMIPS::GenTableBasedPackedSwitch(Register value_reg,
8649 Register constant_area,
8650 int32_t lower_bound,
8651 uint32_t num_entries,
8652 HBasicBlock* switch_block,
8653 HBasicBlock* default_block) {
8654 // Create a jump table.
8655 std::vector<MipsLabel*> labels(num_entries);
8656 const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
8657 for (uint32_t i = 0; i < num_entries; i++) {
8658 labels[i] = codegen_->GetLabelOf(successors[i]);
8659 }
8660 JumpTable* table = __ CreateJumpTable(std::move(labels));
8661
8662 // Is the value in range?
8663 __ Addiu32(TMP, value_reg, -lower_bound);
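// After subtracting lower_bound, any value below the lower bound wraps around to a large
// unsigned number, so the single unsigned comparison against num_entries below rejects
// inputs that are out of range on either side.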
8664 if (IsInt<16>(static_cast<int32_t>(num_entries))) {
8665 __ Sltiu(AT, TMP, num_entries);
8666 __ Beqz(AT, codegen_->GetLabelOf(default_block));
8667 } else {
8668 __ LoadConst32(AT, num_entries);
8669 __ Bgeu(TMP, AT, codegen_->GetLabelOf(default_block));
8670 }
8671
8672 // We are in the range of the table.
8673 // Load the target address from the jump table, indexing by the value.
8674 __ LoadLabelAddress(AT, constant_area, table->GetLabel());
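// Each jump-table entry is a 4-byte offset, so the index is scaled by 4 (a left shift
// by 2) and added to the table's base address to form the entry's address.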
Chris Larsencd0295d2017-03-31 15:26:54 -07008675 __ ShiftAndAdd(TMP, TMP, AT, 2, TMP);
Alexey Frunze96b66822016-09-10 02:32:44 -07008676 __ Lw(TMP, TMP, 0);
8677 // Compute the absolute target address by adding the table start address
8678 // (the table contains offsets to targets relative to its start).
8679 __ Addu(TMP, TMP, AT);
8680 // And jump.
8681 __ Jr(TMP);
8682 __ NopIfNoReordering();
8683}
8684
8685void InstructionCodeGeneratorMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
8686 int32_t lower_bound = switch_instr->GetStartValue();
8687 uint32_t num_entries = switch_instr->GetNumEntries();
8688 LocationSummary* locations = switch_instr->GetLocations();
8689 Register value_reg = locations->InAt(0).AsRegister<Register>();
8690 HBasicBlock* switch_block = switch_instr->GetBlock();
8691 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
8692
8693 if (codegen_->GetInstructionSetFeatures().IsR6() &&
8694 num_entries > kPackedSwitchJumpTableThreshold) {
8695 // R6 uses PC-relative addressing to access the jump table.
8696 // R2, OTOH, requires an HMipsComputeBaseMethodAddress input to access
8697 // the jump table and it is implemented by changing HPackedSwitch to
8698 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress.
8699 // See VisitMipsPackedSwitch() for the table-based implementation on R2.
8700 GenTableBasedPackedSwitch(value_reg,
8701 ZERO,
8702 lower_bound,
8703 num_entries,
8704 switch_block,
8705 default_block);
8706 } else {
8707 GenPackedSwitchWithCompares(value_reg,
8708 lower_bound,
8709 num_entries,
8710 switch_block,
8711 default_block);
8712 }
8713}
8714
8715void LocationsBuilderMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
8716 LocationSummary* locations =
8717 new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
8718 locations->SetInAt(0, Location::RequiresRegister());
8719 // Constant area pointer (HMipsComputeBaseMethodAddress).
8720 locations->SetInAt(1, Location::RequiresRegister());
8721}
8722
8723void InstructionCodeGeneratorMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
8724 int32_t lower_bound = switch_instr->GetStartValue();
8725 uint32_t num_entries = switch_instr->GetNumEntries();
8726 LocationSummary* locations = switch_instr->GetLocations();
8727 Register value_reg = locations->InAt(0).AsRegister<Register>();
8728 Register constant_area = locations->InAt(1).AsRegister<Register>();
8729 HBasicBlock* switch_block = switch_instr->GetBlock();
8730 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
8731
8732 // This is an R2-only path. HPackedSwitch has been changed to
8733 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress
8734 // required to address the jump table relative to PC.
8735 GenTableBasedPackedSwitch(value_reg,
8736 constant_area,
8737 lower_bound,
8738 num_entries,
8739 switch_block,
8740 default_block);
8741}
8742
Alexey Frunzee3fb2452016-05-10 16:08:05 -07008743void LocationsBuilderMIPS::VisitMipsComputeBaseMethodAddress(
8744 HMipsComputeBaseMethodAddress* insn) {
8745 LocationSummary* locations =
8746 new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
8747 locations->SetOut(Location::RequiresRegister());
8748}
8749
8750void InstructionCodeGeneratorMIPS::VisitMipsComputeBaseMethodAddress(
8751 HMipsComputeBaseMethodAddress* insn) {
8752 LocationSummary* locations = insn->GetLocations();
8753 Register reg = locations->Out().AsRegister<Register>();
8754
8755 CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
8756
8757 // Generate a dummy PC-relative call to obtain PC.
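// NAL is a branch-and-link whose branch is never taken; its only effect is to place the
// address of the instruction after the delay slot in RA, so execution simply falls
// through with the PC captured.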
8758 __ Nal();
8759 // Grab the return address off RA.
8760 __ Move(reg, RA);
8761
8762 // Remember this offset (the obtained PC value) for later use with constant area.
8763 __ BindPcRelBaseLabel();
8764}
8765
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008766void LocationsBuilderMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
8767 // The trampoline uses the same calling convention as a regular dex call, except that
8768 // instead of holding the target Method*, arg0 (A0) will contain the method_idx.
8770 HandleInvoke(invoke);
8771}
8772
8773void InstructionCodeGeneratorMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
8774 codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
8775}
8776
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008777void LocationsBuilderMIPS::VisitClassTableGet(HClassTableGet* instruction) {
8778 LocationSummary* locations =
8779 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
8780 locations->SetInAt(0, Location::RequiresRegister());
8781 locations->SetOut(Location::RequiresRegister());
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00008782}
8783
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008784void InstructionCodeGeneratorMIPS::VisitClassTableGet(HClassTableGet* instruction) {
8785 LocationSummary* locations = instruction->GetLocations();
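// A vtable entry is embedded directly in the Class object, so a single load suffices;
// an IMT entry needs an extra indirection through the class's ImtPtr before indexing
// the table.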
Vladimir Markoa1de9182016-02-25 11:37:38 +00008786 if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01008787 uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008788 instruction->GetIndex(), kMipsPointerSize).SizeValue();
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01008789 __ LoadFromOffset(kLoadWord,
8790 locations->Out().AsRegister<Register>(),
8791 locations->InAt(0).AsRegister<Register>(),
8792 method_offset);
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008793 } else {
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01008794 uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
Matthew Gharrity465ecc82016-07-19 21:32:52 +00008795 instruction->GetIndex(), kMipsPointerSize));
Artem Udovichenkoa62cb9b2016-06-30 09:18:25 +00008796 __ LoadFromOffset(kLoadWord,
8797 locations->Out().AsRegister<Register>(),
8798 locations->InAt(0).AsRegister<Register>(),
8799 mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
Nicolas Geoffrayff484b92016-07-13 14:13:48 +01008800 __ LoadFromOffset(kLoadWord,
8801 locations->Out().AsRegister<Register>(),
8802 locations->Out().AsRegister<Register>(),
8803 method_offset);
Roland Levillain2aba7cd2016-02-03 12:27:20 +00008804 }
Nicolas Geoffraya42363f2015-12-17 14:57:09 +00008805}
8806
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008807#undef __
8808#undef QUICK_ENTRY_POINT
8809
8810} // namespace mips
8811} // namespace art