blob: ac8f675e2da8cdda43e0af2c8a1d0d09bfccf8e6 [file] [log] [blame]
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_mips.h"
18
Alexey Frunze4147fcc2017-06-17 19:57:27 -070019#include "arch/mips/asm_support_mips.h"
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020020#include "arch/mips/entrypoints_direct_mips.h"
21#include "arch/mips/instruction_set_features_mips.h"
22#include "art_method.h"
Chris Larsen701566a2015-10-27 15:29:13 -070023#include "code_generator_utils.h"
Vladimir Marko3a21e382016-09-02 12:38:38 +010024#include "compiled_method.h"
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020025#include "entrypoints/quick/quick_entrypoints.h"
26#include "entrypoints/quick/quick_entrypoints_enum.h"
27#include "gc/accounting/card_table.h"
28#include "intrinsics.h"
Chris Larsen701566a2015-10-27 15:29:13 -070029#include "intrinsics_mips.h"
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020030#include "mirror/array-inl.h"
31#include "mirror/class-inl.h"
32#include "offsets.h"
33#include "thread.h"
34#include "utils/assembler.h"
35#include "utils/mips/assembler_mips.h"
36#include "utils/stack_checks.h"
37
38namespace art {
39namespace mips {
40
// Offset (in the current frame) at which the caller's ArtMethod* is spilled: slot 0, i.e.
// the very bottom of the frame.
static constexpr int kCurrentMethodStackOffset = 0;
// Register in which the callee receives the ArtMethod* of the method being invoked (A0 per
// the managed-code calling convention used here).
static constexpr Register kMethodRegisterArgument = A0;

// Flags controlling the use of thunks for Baker read barriers.
constexpr bool kBakerReadBarrierThunksEnableForFields = true;
constexpr bool kBakerReadBarrierThunksEnableForArrays = true;
constexpr bool kBakerReadBarrierThunksEnableForGcRoots = true;
48
Goran Jakovljevicf652cec2015-08-25 16:11:42 +020049Location MipsReturnLocation(Primitive::Type return_type) {
50 switch (return_type) {
51 case Primitive::kPrimBoolean:
52 case Primitive::kPrimByte:
53 case Primitive::kPrimChar:
54 case Primitive::kPrimShort:
55 case Primitive::kPrimInt:
56 case Primitive::kPrimNot:
57 return Location::RegisterLocation(V0);
58
59 case Primitive::kPrimLong:
60 return Location::RegisterPairLocation(V0, V1);
61
62 case Primitive::kPrimFloat:
63 case Primitive::kPrimDouble:
64 return Location::FpuRegisterLocation(F0);
65
66 case Primitive::kPrimVoid:
67 return Location();
68 }
69 UNREACHABLE();
70}
71
// Return-value location for dex calls: delegates to the shared MipsReturnLocation() helper.
Location InvokeDexCallingConventionVisitorMIPS::GetReturnLocation(Primitive::Type type) const {
  return MipsReturnLocation(type);
}
75
// Location of the callee ArtMethod* on entry: the fixed method register (A0).
Location InvokeDexCallingConventionVisitorMIPS::GetMethodLocation() const {
  return Location::RegisterLocation(kMethodRegisterArgument);
}
79
// Computes the location of the next argument of type `type` for a dex call, advancing the
// visitor's running GP/FPU register and stack indices. Arguments that do not fit in the
// convention's registers are placed in (double) stack slots.
Location InvokeDexCallingConventionVisitorMIPS::GetNextLocation(Primitive::Type type) {
  Location next_location;

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit values consume one GP register, or one stack slot once registers run out.
      uint32_t gp_index = gp_index_++;
      if (gp_index < calling_convention.GetNumberOfRegisters()) {
        next_location = Location::RegisterLocation(calling_convention.GetRegisterAt(gp_index));
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Location::StackSlot(stack_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit values need an aligned even/odd GP register pair; two registers are consumed.
      uint32_t gp_index = gp_index_;
      gp_index_ += 2;
      if (gp_index + 1 < calling_convention.GetNumberOfRegisters()) {
        Register reg = calling_convention.GetRegisterAt(gp_index);
        if (reg == A1 || reg == A3) {
          // Pair would start on an odd register: burn one more register to realign.
          gp_index_++;  // Skip A1(A3), and use A2_A3(T0_T1) instead.
          gp_index++;
        }
        Register low_even = calling_convention.GetRegisterAt(gp_index);
        Register high_odd = calling_convention.GetRegisterAt(gp_index + 1);
        DCHECK_EQ(low_even + 1, high_odd);
        next_location = Location::RegisterPairLocation(low_even, high_odd);
      } else {
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Location::DoubleStackSlot(stack_offset);
      }
      break;
    }

    // Note: both float and double types are stored in even FPU registers. On 32 bit FPU, double
    // will take up the even/odd pair, while floats are stored in even regs only.
    // On 64 bit FPU, both double and float are stored in even registers only.
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      uint32_t float_index = float_index_++;
      if (float_index < calling_convention.GetNumberOfFpuRegisters()) {
        next_location = Location::FpuRegisterLocation(
            calling_convention.GetFpuRegisterAt(float_index));
      } else {
        // Out of FPU registers: spill to one or two stack slots depending on width.
        size_t stack_offset = calling_convention.GetStackOffsetOf(stack_index_);
        next_location = Primitive::Is64BitType(type) ? Location::DoubleStackSlot(stack_offset)
                                                     : Location::StackSlot(stack_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }

  // Space on the stack is reserved for all arguments.
  stack_index_ += Primitive::Is64BitType(type) ? 2 : 1;

  return next_location;
}
147
// Runtime calls return values in the same locations as managed calls; reuse the helper.
Location InvokeRuntimeCallingConvention::GetReturnLocation(Primitive::Type type) {
  return MipsReturnLocation(type);
}
151
Roland Levillain7cbd27f2016-08-11 23:53:33 +0100152// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
153#define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -0700154#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
Goran Jakovljevicf652cec2015-08-25 16:11:42 +0200155
// Slow path for HBoundsCheck: throws ArrayIndexOutOfBoundsException (or
// StringIndexOutOfBoundsException for String.charAt) via the runtime. Fatal: never returns
// to the fast path.
class BoundsCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit BoundsCheckSlowPathMIPS(HBoundsCheck* instruction) : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    // We're moving two locations (index and length) to locations that could overlap, so we
    // need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimInt,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimInt);
    // String.charAt bounds failures throw a string-specific exception type.
    QuickEntrypointEnum entrypoint = instruction_->AsBoundsCheck()->IsStringCharAt()
        ? kQuickThrowStringBounds
        : kQuickThrowArrayBounds;
    mips_codegen->InvokeRuntime(entrypoint, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowStringBounds, void, int32_t, int32_t>();
    CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathMIPS);
};
192
// Slow path for HDivZeroCheck: throws ArithmeticException via the runtime. Fatal: never
// returns to the fast path.
class DivZeroCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit DivZeroCheckSlowPathMIPS(HDivZeroCheck* instruction) : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    mips_codegen->InvokeRuntime(kQuickThrowDivZero, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathMIPS);
};
211
// Slow path for HLoadClass/HClinitCheck: resolves (and optionally initializes) a class via
// the runtime, and for the kBssEntry load kind also stores the resolved class into its
// .bss entry so later loads hit the fast path.
class LoadClassSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  LoadClassSlowPathMIPS(HLoadClass* cls,
                        HInstruction* at,
                        uint32_t dex_pc,
                        bool do_clinit,
                        const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high = nullptr)
      : SlowPathCodeMIPS(at),
        cls_(cls),
        dex_pc_(dex_pc),
        do_clinit_(do_clinit),
        bss_info_high_(bss_info_high) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Location out = locations->Out();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
    const bool is_load_class_bss_entry =
        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    Register entry_address = kNoRegister;
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out.AsRegister<Register>() : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    // Pass the type index in A0 and call the resolution/initialization entrypoint.
    dex::TypeIndex type_index = cls_->GetTypeIndex();
    __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
    QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
                                                : kQuickInitializeType;
    mips_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
    if (do_clinit_) {
      CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
    } else {
      CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
    }

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && baker_or_no_read_barriers) {
      // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, bss_info_high_);
      __ Sw(calling_convention.GetRegisterAt(0),
            entry_address,
            /* placeholder */ 0x5678,  // Low half; patched at link time via `info_low`.
            &info_low->label);
    }

    // Move the class to the desired location.
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      Primitive::Type type = instruction_->GetType();
      mips_codegen->MoveLocation(out,
                                 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                                 type);
    }
    RestoreLiveRegisters(codegen, locations);

    // For HLoadClass/kBssEntry, store the resolved class to the BSS entry.
    if (is_load_class_bss_entry && !baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the class entry.
      const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
      Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index, info_high);
      mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base);
      __ Sw(out.AsRegister<Register>(), TMP, /* placeholder */ 0x5678, &info_low->label);
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadClassSlowPathMIPS"; }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  // Pointer to the high half PC-relative patch info for HLoadClass/kBssEntry.
  const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathMIPS);
};
319
// Slow path for HLoadString with the kBssEntry load kind: resolves the string via the
// runtime and stores the result into the string's .bss entry so later loads hit the
// fast path.
class LoadStringSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit LoadStringSlowPathMIPS(HLoadString* instruction,
                                  const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high)
      : SlowPathCodeMIPS(instruction), bss_info_high_(bss_info_high) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    DCHECK(instruction_->IsLoadString());
    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    HLoadString* load = instruction_->AsLoadString();
    const dex::StringIndex string_index = load->GetStringIndex();
    Register out = locations->Out().AsRegister<Register>();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    const bool baker_or_no_read_barriers = (!kUseReadBarrier || kUseBakerReadBarrier);
    InvokeRuntimeCallingConvention calling_convention;
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // For HLoadString/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
    Register entry_address = kNoRegister;
    if (baker_or_no_read_barriers) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      bool temp_is_a0 = (temp == calling_convention.GetRegisterAt(0));
      // In the unlucky case that `temp` is A0, we preserve the address in `out` across the
      // kSaveEverything call.
      entry_address = temp_is_a0 ? out : temp;
      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
      if (temp_is_a0) {
        __ Move(entry_address, temp);
      }
    }

    // Pass the string index in A0 and call the resolution entrypoint.
    __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
    mips_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();

    // Store the resolved string to the BSS entry.
    if (baker_or_no_read_barriers) {
      // The string entry address was preserved in `entry_address` thanks to kSaveEverything.
      DCHECK(bss_info_high_);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, bss_info_high_);
      __ Sw(calling_convention.GetRegisterAt(0),
            entry_address,
            /* placeholder */ 0x5678,  // Low half; patched at link time via `info_low`.
            &info_low->label);
    }

    // Move the resolved string (returned in A0's return slot) to the desired location.
    Primitive::Type type = instruction_->GetType();
    mips_codegen->MoveLocation(locations->Out(),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               type);
    RestoreLiveRegisters(codegen, locations);

    // Store the resolved string to the BSS entry.
    if (!baker_or_no_read_barriers) {
      // For non-Baker read barriers we need to re-calculate the address of
      // the string entry.
      const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
      Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          mips_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index);
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          mips_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, info_high);
      mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base);
      __ Sw(out, TMP, /* placeholder */ 0x5678, &info_low->label);
    }
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathMIPS"; }

 private:
  // Pointer to the high half PC-relative patch info.
  const CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathMIPS);
};
400
// Slow path for HNullCheck: throws NullPointerException via the runtime. Fatal: never
// returns to the fast path.
class NullCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit NullCheckSlowPathMIPS(HNullCheck* instr) : SlowPathCodeMIPS(instr) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    if (instruction_->CanThrowIntoCatchBlock()) {
      // Live registers will be restored in the catch block if caught.
      SaveLiveRegisters(codegen, instruction_->GetLocations());
    }
    mips_codegen->InvokeRuntime(kQuickThrowNullPointer,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
  }

  bool IsFatal() const OVERRIDE { return true; }

  const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathMIPS);
};
426
// Slow path for HSuspendCheck: calls the runtime's test-suspend entrypoint (GC safepoint /
// thread suspension), then branches back either to the instruction after the check or to
// the given successor block.
class SuspendCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  SuspendCheckSlowPathMIPS(HSuspendCheck* instruction, HBasicBlock* successor)
      : SlowPathCodeMIPS(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);  // Only saves live vector registers for SIMD.
    mips_codegen->InvokeRuntime(kQuickTestSuspend, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickTestSuspend, void, void>();
    RestoreLiveRegisters(codegen, locations);  // Only restores live vector registers for SIMD.
    if (successor_ == nullptr) {
      // Resume right after the suspend check.
      __ B(GetReturnLabel());
    } else {
      // Continue at the designated successor block.
      __ B(mips_codegen->GetLabelOf(successor_));
    }
  }

  MipsLabel* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

  const char* GetDescription() const OVERRIDE { return "SuspendCheckSlowPathMIPS"; }

 private:
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  MipsLabel return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathMIPS);
};
463
// Slow path for HInstanceOf/HCheckCast: calls the corresponding runtime entrypoint.
// For instanceof the (boolean-like) result is moved to the output location; for a
// check-cast with `is_fatal_` the runtime throws and control never returns.
class TypeCheckSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit TypeCheckSlowPathMIPS(HInstruction* instruction, bool is_fatal)
      : SlowPathCodeMIPS(instruction), is_fatal_(is_fatal) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    uint32_t dex_pc = instruction_->GetDexPc();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);

    __ Bind(GetEntryLabel());
    if (!is_fatal_) {
      // Fatal paths never return, so saving registers would be wasted work.
      SaveLiveRegisters(codegen, locations);
    }

    // We're moving two locations (object and class) to locations that could overlap, so we
    // need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(locations->InAt(0),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               Primitive::kPrimNot,
                               locations->InAt(1),
                               Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                               Primitive::kPrimNot);
    if (instruction_->IsInstanceOf()) {
      mips_codegen->InvokeRuntime(kQuickInstanceofNonTrivial, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickInstanceofNonTrivial, size_t, mirror::Object*, mirror::Class*>();
      Primitive::Type ret_type = instruction_->GetType();
      Location ret_loc = calling_convention.GetReturnLocation(ret_type);
      mips_codegen->MoveLocation(locations->Out(), ret_loc, ret_type);
    } else {
      DCHECK(instruction_->IsCheckCast());
      mips_codegen->InvokeRuntime(kQuickCheckInstanceOf, instruction_, dex_pc, this);
      CheckEntrypointTypes<kQuickCheckInstanceOf, void, mirror::Object*, mirror::Class*>();
    }

    if (!is_fatal_) {
      RestoreLiveRegisters(codegen, locations);
      __ B(GetExitLabel());
    }
  }

  const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathMIPS"; }

  bool IsFatal() const OVERRIDE { return is_fatal_; }

 private:
  // Whether the runtime call throws unconditionally (no return to the fast path).
  const bool is_fatal_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathMIPS);
};
517
// Slow path for HDeoptimize: passes the deoptimization kind to the runtime, which transfers
// execution to the interpreter. The runtime call does not return here.
class DeoptimizationSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  explicit DeoptimizationSlowPathMIPS(HDeoptimize* instruction)
      : SlowPathCodeMIPS(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    __ Bind(GetEntryLabel());
    LocationSummary* locations = instruction_->GetLocations();
    SaveLiveRegisters(codegen, locations);
    InvokeRuntimeCallingConvention calling_convention;
    // First argument: the DeoptimizationKind of this deopt, as a raw uint32_t.
    __ LoadConst32(calling_convention.GetRegisterAt(0),
                   static_cast<uint32_t>(instruction_->AsDeoptimize()->GetDeoptimizationKind()));
    mips_codegen->InvokeRuntime(kQuickDeoptimize, instruction_, instruction_->GetDexPc(), this);
    CheckEntrypointTypes<kQuickDeoptimize, void, DeoptimizationKind>();
  }

  const char* GetDescription() const OVERRIDE { return "DeoptimizationSlowPathMIPS"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(DeoptimizationSlowPathMIPS);
};
540
Alexey Frunze15958152017-02-09 19:08:30 -0800541class ArraySetSlowPathMIPS : public SlowPathCodeMIPS {
542 public:
543 explicit ArraySetSlowPathMIPS(HInstruction* instruction) : SlowPathCodeMIPS(instruction) {}
544
545 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
546 LocationSummary* locations = instruction_->GetLocations();
547 __ Bind(GetEntryLabel());
548 SaveLiveRegisters(codegen, locations);
549
550 InvokeRuntimeCallingConvention calling_convention;
551 HParallelMove parallel_move(codegen->GetGraph()->GetArena());
552 parallel_move.AddMove(
553 locations->InAt(0),
554 Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
555 Primitive::kPrimNot,
556 nullptr);
557 parallel_move.AddMove(
558 locations->InAt(1),
559 Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
560 Primitive::kPrimInt,
561 nullptr);
562 parallel_move.AddMove(
563 locations->InAt(2),
564 Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
565 Primitive::kPrimNot,
566 nullptr);
567 codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
568
569 CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
570 mips_codegen->InvokeRuntime(kQuickAputObject, instruction_, instruction_->GetDexPc(), this);
571 CheckEntrypointTypes<kQuickAputObject, void, mirror::Array*, int32_t, mirror::Object*>();
572 RestoreLiveRegisters(codegen, locations);
573 __ B(GetExitLabel());
574 }
575
576 const char* GetDescription() const OVERRIDE { return "ArraySetSlowPathMIPS"; }
577
578 private:
579 DISALLOW_COPY_AND_ASSIGN(ArraySetSlowPathMIPS);
580};
581
582// Slow path marking an object reference `ref` during a read
583// barrier. The field `obj.field` in the object `obj` holding this
584// reference does not get updated by this slow path after marking (see
585// ReadBarrierMarkAndUpdateFieldSlowPathMIPS below for that).
586//
587// This means that after the execution of this slow path, `ref` will
588// always be up-to-date, but `obj.field` may not; i.e., after the
589// flip, `ref` will be a to-space reference, but `obj.field` will
590// probably still be a from-space reference (unless it gets updated by
591// another thread, or if another thread installed another object
592// reference (different from `ref`) in `obj.field`).
593//
594// If `entrypoint` is a valid location it is assumed to already be
595// holding the entrypoint. The case where the entrypoint is passed in
596// is for the GcRoot read barrier.
class ReadBarrierMarkSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierMarkSlowPathMIPS(HInstruction* instruction,
                              Location ref,
                              Location entrypoint = Location::NoLocation())
      : SlowPathCodeMIPS(instruction), ref_(ref), entrypoint_(entrypoint) {
    // This slow path is only meaningful when compiling with read barriers.
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierMarkSlowPathMIPS"; }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // Only instructions that may embed a read barrier on a reference
    // load are expected to use this slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsArraySet() ||
           instruction_->IsLoadClass() ||
           instruction_->IsLoadString() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()) ||
           (instruction_->IsInvokeStaticOrDirect() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    // Only these registers have a dedicated ReadBarrierMarkRegX
    // entrypoint (selected below via `ref_reg - 1`).
    DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == FP)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    if (entrypoint_.IsValid()) {
      // The entrypoint is already loaded in a register (the GC root
      // read barrier case; see the class comment).
      mips_codegen->ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction_, this);
      DCHECK_EQ(entrypoint_.AsRegister<Register>(), T9);
      __ Jalr(entrypoint_.AsRegister<Register>());
      __ NopIfNoReordering();
    } else {
      // Load the per-register entrypoint from the Thread object; the
      // entrypoint index is derived from the reference register number.
      int32_t entry_point_offset =
          Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
      // This runtime call does not require a stack map.
      mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                        instruction_,
                                                        this,
                                                        /* direct */ false);
    }
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;

  // The location of the entrypoint if already loaded.
  const Location entrypoint_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkSlowPathMIPS);
};
674
675// Slow path marking an object reference `ref` during a read barrier,
676// and if needed, atomically updating the field `obj.field` in the
677// object `obj` holding this reference after marking (contrary to
678// ReadBarrierMarkSlowPathMIPS above, which never tries to update
679// `obj.field`).
680//
681// This means that after the execution of this slow path, both `ref`
682// and `obj.field` will be up-to-date; i.e., after the flip, both will
683// hold the same to-space reference (unless another thread installed
684// another object reference (different from `ref`) in `obj.field`).
class ReadBarrierMarkAndUpdateFieldSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierMarkAndUpdateFieldSlowPathMIPS(HInstruction* instruction,
                                            Location ref,
                                            Register obj,
                                            Location field_offset,
                                            Register temp1)
      : SlowPathCodeMIPS(instruction),
        ref_(ref),
        obj_(obj),
        field_offset_(field_offset),
        temp1_(temp1) {
    // This slow path is only meaningful when compiling with read barriers.
    DCHECK(kEmitCompilerReadBarrier);
  }

  const char* GetDescription() const OVERRIDE {
    return "ReadBarrierMarkAndUpdateFieldSlowPathMIPS";
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register ref_reg = ref_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;
    // This slow path is only used by the UnsafeCASObject intrinsic.
    DCHECK((instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier marking and field updating slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kUnsafeCASObject);
    DCHECK(field_offset_.IsRegisterPair()) << field_offset_;

    __ Bind(GetEntryLabel());

    // Save the old reference.
    // Note that we cannot use AT or TMP to save the old reference, as those
    // are used by the code that follows, but we need the old reference after
    // the call to the ReadBarrierMarkRegX entry point.
    DCHECK_NE(temp1_, AT);
    DCHECK_NE(temp1_, TMP);
    __ Move(temp1_, ref_reg);

    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    // Only these registers have a dedicated ReadBarrierMarkRegX
    // entrypoint (selected below via `ref_reg - 1`).
    DCHECK((V0 <= ref_reg && ref_reg <= T7) ||
           (S2 <= ref_reg && ref_reg <= S7) ||
           (ref_reg == FP)) << ref_reg;
    // "Compact" slow path, saving two moves.
    //
    // Instead of using the standard runtime calling convention (input
    // and output in A0 and V0 respectively):
    //
    //   A0 <- ref
    //   V0 <- ReadBarrierMark(A0)
    //   ref <- V0
    //
    // we just use rX (the register containing `ref`) as input and output
    // of a dedicated entrypoint:
    //
    //   rX <- ReadBarrierMarkRegX(rX)
    //
    int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(ref_reg - 1);
    // This runtime call does not require a stack map.
    mips_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset,
                                                      instruction_,
                                                      this,
                                                      /* direct */ false);

    // If the new reference is different from the old reference,
    // update the field in the holder (`*(obj_ + field_offset_)`).
    //
    // Note that this field could also hold a different object, if
    // another thread had concurrently changed it. In that case, the
    // compare-and-set (CAS) loop below would abort, leaving the
    // field as-is.
    MipsLabel done;
    __ Beq(temp1_, ref_reg, &done);

    // Update the holder's field atomically. This may fail if the
    // mutator updates the field before us, but that is OK. This is
    // achieved using a strong compare-and-set (CAS) operation with
    // relaxed memory synchronization ordering, where the expected
    // value is the old reference and the desired value is the new
    // reference.

    // Convenience aliases.
    Register base = obj_;
    // The UnsafeCASObject intrinsic uses a register pair as field
    // offset ("long offset"), of which only the low part contains
    // data.
    Register offset = field_offset_.AsRegisterPairLow<Register>();
    Register expected = temp1_;
    Register value = ref_reg;
    Register tmp_ptr = TMP;  // Pointer to actual memory.
    Register tmp = AT;  // Value in memory.

    __ Addu(tmp_ptr, base, offset);

    if (kPoisonHeapReferences) {
      __ PoisonHeapReference(expected);
      // Do not poison `value` if it is the same register as
      // `expected`, which has just been poisoned.
      if (value != expected) {
        __ PoisonHeapReference(value);
      }
    }

    // do {
    //   tmp = [r_ptr] - expected;
    // } while (tmp == 0 && failure([r_ptr] <- r_new_value));

    // R6 and pre-R6 use different load-linked/store-conditional encodings.
    bool is_r6 = mips_codegen->GetInstructionSetFeatures().IsR6();
    MipsLabel loop_head, exit_loop;
    __ Bind(&loop_head);
    if (is_r6) {
      __ LlR6(tmp, tmp_ptr);
    } else {
      __ LlR2(tmp, tmp_ptr);
    }
    __ Bne(tmp, expected, &exit_loop);
    __ Move(tmp, value);
    if (is_r6) {
      __ ScR6(tmp, tmp_ptr);
    } else {
      __ ScR2(tmp, tmp_ptr);
    }
    // The store-conditional wrote 0 into `tmp` on failure; retry then.
    __ Beqz(tmp, &loop_head);
    __ Bind(&exit_loop);

    if (kPoisonHeapReferences) {
      __ UnpoisonHeapReference(expected);
      // Do not unpoison `value` if it is the same register as
      // `expected`, which has just been unpoisoned.
      if (value != expected) {
        __ UnpoisonHeapReference(value);
      }
    }

    __ Bind(&done);
    __ B(GetExitLabel());
  }

 private:
  // The location (register) of the marked object reference.
  const Location ref_;
  // The register containing the object holding the marked object reference field.
  const Register obj_;
  // The location of the offset of the marked reference field within `obj_`.
  Location field_offset_;

  // Scratch register holding the pre-marking reference across the
  // entrypoint call; must differ from AT and TMP (checked above).
  const Register temp1_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierMarkAndUpdateFieldSlowPathMIPS);
};
841
842// Slow path generating a read barrier for a heap reference.
class ReadBarrierForHeapReferenceSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierForHeapReferenceSlowPathMIPS(HInstruction* instruction,
                                          Location out,
                                          Location ref,
                                          Location obj,
                                          uint32_t offset,
                                          Location index)
      : SlowPathCodeMIPS(instruction),
        out_(out),
        ref_(ref),
        obj_(obj),
        offset_(offset),
        index_(index) {
    DCHECK(kEmitCompilerReadBarrier);
    // If `obj` is equal to `out` or `ref`, it means the initial object
    // has been overwritten by (or after) the heap object reference load
    // to be instrumented, e.g.:
    //
    //   __ LoadFromOffset(kLoadWord, out, out, offset);
    //   codegen_->GenerateReadBarrierSlow(instruction, out_loc, out_loc, out_loc, offset);
    //
    // In that case, we have lost the information about the original
    // object, and the emitted read barrier cannot work properly.
    DCHECK(!obj.Equals(out)) << "obj=" << obj << " out=" << out;
    DCHECK(!obj.Equals(ref)) << "obj=" << obj << " ref=" << ref;
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    // Only instructions performing an instrumented heap reference load
    // are expected to use this slow path.
    DCHECK(instruction_->IsInstanceFieldGet() ||
           instruction_->IsStaticFieldGet() ||
           instruction_->IsArrayGet() ||
           instruction_->IsInstanceOf() ||
           instruction_->IsCheckCast() ||
           (instruction_->IsInvokeVirtual() && instruction_->GetLocations()->Intrinsified()))
        << "Unexpected instruction in read barrier for heap reference slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    // We may have to change the index's value, but as `index_` is a
    // constant member (like other "inputs" of this slow path),
    // introduce a copy of it, `index`.
    Location index = index_;
    if (index_.IsValid()) {
      // Handle `index_` for HArrayGet and UnsafeGetObject/UnsafeGetObjectVolatile intrinsics.
      if (instruction_->IsArrayGet()) {
        // Compute the actual memory offset and store it in `index`.
        Register index_reg = index_.AsRegister<Register>();
        DCHECK(locations->GetLiveRegisters()->ContainsCoreRegister(index_reg));
        if (codegen->IsCoreCalleeSaveRegister(index_reg)) {
          // We are about to change the value of `index_reg` (see the
          // calls to art::mips::MipsAssembler::Sll and
          // art::mips::MipsAssembler::Addiu32 below), but it has
          // not been saved by the previous call to
          // art::SlowPathCode::SaveLiveRegisters, as it is a
          // callee-save register --
          // art::SlowPathCode::SaveLiveRegisters does not consider
          // callee-save registers, as it has been designed with the
          // assumption that callee-save registers are supposed to be
          // handled by the called function. So, as a callee-save
          // register, `index_reg` _would_ eventually be saved onto
          // the stack, but it would be too late: we would have
          // changed its value earlier. Therefore, we manually save
          // it here into another freely available register,
          // `free_reg`, chosen of course among the caller-save
          // registers (as a callee-save `free_reg` register would
          // exhibit the same problem).
          //
          // Note we could have requested a temporary register from
          // the register allocator instead; but we prefer not to, as
          // this is a slow path, and we know we can find a
          // caller-save register that is available.
          Register free_reg = FindAvailableCallerSaveRegister(codegen);
          __ Move(free_reg, index_reg);
          index_reg = free_reg;
          index = Location::RegisterLocation(index_reg);
        } else {
          // The initial register stored in `index_` has already been
          // saved in the call to art::SlowPathCode::SaveLiveRegisters
          // (as it is not a callee-save register), so we can freely
          // use it.
        }
        // Shifting the index value contained in `index_reg` by the scale
        // factor (2) cannot overflow in practice, as the runtime is
        // unable to allocate object arrays with a size larger than
        // 2^26 - 1 (that is, 2^28 - 4 bytes).
        __ Sll(index_reg, index_reg, TIMES_4);
        static_assert(
            sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
            "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
        __ Addiu32(index_reg, index_reg, offset_);
      } else {
        // In the case of the UnsafeGetObject/UnsafeGetObjectVolatile
        // intrinsics, `index_` is not shifted by a scale factor of 2
        // (as in the case of ArrayGet), as it is actually an offset
        // to an object field within an object.
        DCHECK(instruction_->IsInvoke()) << instruction_->DebugName();
        DCHECK(instruction_->GetLocations()->Intrinsified());
        DCHECK((instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObject) ||
               (instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile))
            << instruction_->AsInvoke()->GetIntrinsic();
        DCHECK_EQ(offset_, 0U);
        DCHECK(index_.IsRegisterPair());
        // UnsafeGet's offset location is a register pair, the low
        // part contains the correct offset.
        index = index_.ToLow();
      }
    }

    // We're moving two or three locations to locations that could
    // overlap, so we need a parallel move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    HParallelMove parallel_move(codegen->GetGraph()->GetArena());
    parallel_move.AddMove(ref_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                          Primitive::kPrimNot,
                          nullptr);
    parallel_move.AddMove(obj_,
                          Location::RegisterLocation(calling_convention.GetRegisterAt(1)),
                          Primitive::kPrimNot,
                          nullptr);
    if (index.IsValid()) {
      parallel_move.AddMove(index,
                            Location::RegisterLocation(calling_convention.GetRegisterAt(2)),
                            Primitive::kPrimInt,
                            nullptr);
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
    } else {
      // No index: pass the constant offset directly in the third argument.
      codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
      __ LoadConst32(calling_convention.GetRegisterAt(2), offset_);
    }
    mips_codegen->InvokeRuntime(kQuickReadBarrierSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<
        kQuickReadBarrierSlow, mirror::Object*, mirror::Object*, mirror::Object*, uint32_t>();
    // Move the runtime's result into the output location.
    mips_codegen->MoveLocation(out_,
                               calling_convention.GetReturnLocation(Primitive::kPrimNot),
                               Primitive::kPrimNot);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForHeapReferenceSlowPathMIPS"; }

 private:
  // Finds a caller-save core register distinct from `ref_` and `obj_`
  // that can be clobbered without being saved by SaveLiveRegisters.
  Register FindAvailableCallerSaveRegister(CodeGenerator* codegen) {
    size_t ref = static_cast<int>(ref_.AsRegister<Register>());
    size_t obj = static_cast<int>(obj_.AsRegister<Register>());
    for (size_t i = 0, e = codegen->GetNumberOfCoreRegisters(); i < e; ++i) {
      if (i != ref &&
          i != obj &&
          !codegen->IsCoreCalleeSaveRegister(i) &&
          !codegen->IsBlockedCoreRegister(i)) {
        return static_cast<Register>(i);
      }
    }
    // We shall never fail to find a free caller-save register, as
    // there are more than two core caller-save registers on MIPS
    // (meaning it is possible to find one which is different from
    // `ref` and `obj`).
    DCHECK_GT(codegen->GetNumberOfCoreCallerSaveRegisters(), 2u);
    LOG(FATAL) << "Could not find a free caller-save register";
    UNREACHABLE();
  }

  // The location the runtime's result is written to.
  const Location out_;
  // The location of the reference passed as the first runtime argument.
  const Location ref_;
  // The location of the object holding the reference (second argument).
  const Location obj_;
  // Constant byte offset; 0 when `index_` holds the Unsafe "long offset".
  const uint32_t offset_;
  // An additional location containing an index to an array.
  // Only used for HArrayGet and the UnsafeGetObject &
  // UnsafeGetObjectVolatile intrinsics.
  const Location index_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForHeapReferenceSlowPathMIPS);
};
1029
1030// Slow path generating a read barrier for a GC root.
class ReadBarrierForRootSlowPathMIPS : public SlowPathCodeMIPS {
 public:
  ReadBarrierForRootSlowPathMIPS(HInstruction* instruction, Location out, Location root)
      : SlowPathCodeMIPS(instruction), out_(out), root_(root) {
    // This slow path is only meaningful when compiling with read barriers.
    DCHECK(kEmitCompilerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    Register reg_out = out_.AsRegister<Register>();
    DCHECK(locations->CanCall());
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(reg_out));
    // Only HLoadClass and HLoadString load GC roots through this slow path.
    DCHECK(instruction_->IsLoadClass() || instruction_->IsLoadString())
        << "Unexpected instruction in read barrier for GC root slow path: "
        << instruction_->DebugName();

    __ Bind(GetEntryLabel());
    SaveLiveRegisters(codegen, locations);

    InvokeRuntimeCallingConvention calling_convention;
    CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
    // Pass the root in the first runtime argument register.
    mips_codegen->MoveLocation(Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
                               root_,
                               Primitive::kPrimNot);
    mips_codegen->InvokeRuntime(kQuickReadBarrierForRootSlow,
                                instruction_,
                                instruction_->GetDexPc(),
                                this);
    CheckEntrypointTypes<kQuickReadBarrierForRootSlow, mirror::Object*, GcRoot<mirror::Object>*>();
    // Move the runtime's result into the output location.
    mips_codegen->MoveLocation(out_,
                               calling_convention.GetReturnLocation(Primitive::kPrimNot),
                               Primitive::kPrimNot);

    RestoreLiveRegisters(codegen, locations);
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierForRootSlowPathMIPS"; }

 private:
  // The location the runtime's result is written to.
  const Location out_;
  // The location of the GC root passed to the runtime.
  const Location root_;

  DISALLOW_COPY_AND_ASSIGN(ReadBarrierForRootSlowPathMIPS);
};
1076
// Constructs the MIPS32 code generator. Registers the callee-save masks
// with the base CodeGenerator and sets up per-compilation containers
// (arena-allocated) for literals and linker/JIT patch records.
CodeGeneratorMIPS::CodeGeneratorMIPS(HGraph* graph,
                                     const MipsInstructionSetFeatures& isa_features,
                                     const CompilerOptions& compiler_options,
                                     OptimizingCompilerStats* stats)
    : CodeGenerator(graph,
                    kNumberOfCoreRegisters,
                    kNumberOfFRegisters,
                    kNumberOfRegisterPairs,
                    ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
                                        arraysize(kCoreCalleeSaves)),
                    ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
                                        arraysize(kFpuCalleeSaves)),
                    compiler_options,
                    stats),
      block_labels_(nullptr),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(graph->GetArena(), &isa_features),
      isa_features_(isa_features),
      // Map of 32-bit literals, keyed by value.
      uint32_literals_(std::less<uint32_t>(),
                       graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      // PC-relative and .bss-entry patch records for methods, types and strings.
      pc_relative_method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      method_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      // Patch records for string/class roots in JIT-compiled code.
      jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
      clobbered_ra_(false) {
  // Save RA (containing the return address) to mimic Quick.
  AddAllocatedRegister(Location::RegisterLocation(RA));
}
1111
1112#undef __
Roland Levillain7cbd27f2016-08-11 23:53:33 +01001113// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
1114#define __ down_cast<MipsAssembler*>(GetAssembler())-> // NOLINT
Andreas Gampe542451c2016-07-26 09:02:02 -07001115#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kMipsPointerSize, x).Int32Value()
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001116
1117void CodeGeneratorMIPS::Finalize(CodeAllocator* allocator) {
1118 // Ensure that we fix up branches.
1119 __ FinalizeCode();
1120
1121 // Adjust native pc offsets in stack maps.
1122 for (size_t i = 0, num = stack_map_stream_.GetNumberOfStackMaps(); i != num; ++i) {
Mathieu Chartiera2f526f2017-01-19 14:48:48 -08001123 uint32_t old_position =
1124 stack_map_stream_.GetStackMap(i).native_pc_code_offset.Uint32Value(kMips);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001125 uint32_t new_position = __ GetAdjustedPosition(old_position);
1126 DCHECK_GE(new_position, old_position);
1127 stack_map_stream_.SetStackMapNativePcOffset(i, new_position);
1128 }
1129
1130 // Adjust pc offsets for the disassembly information.
1131 if (disasm_info_ != nullptr) {
1132 GeneratedCodeInterval* frame_entry_interval = disasm_info_->GetFrameEntryInterval();
1133 frame_entry_interval->start = __ GetAdjustedPosition(frame_entry_interval->start);
1134 frame_entry_interval->end = __ GetAdjustedPosition(frame_entry_interval->end);
1135 for (auto& it : *disasm_info_->GetInstructionIntervals()) {
1136 it.second.start = __ GetAdjustedPosition(it.second.start);
1137 it.second.end = __ GetAdjustedPosition(it.second.end);
1138 }
1139 for (auto& it : *disasm_info_->GetSlowPathIntervals()) {
1140 it.code_interval.start = __ GetAdjustedPosition(it.code_interval.start);
1141 it.code_interval.end = __ GetAdjustedPosition(it.code_interval.end);
1142 }
1143 }
1144
1145 CodeGenerator::Finalize(allocator);
1146}
1147
// Parallel moves are emitted with the owning code generator's assembler.
MipsAssembler* ParallelMoveResolverMIPS::GetAssembler() const {
  return codegen_->GetAssembler();
}
1151
1152void ParallelMoveResolverMIPS::EmitMove(size_t index) {
1153 DCHECK_LT(index, moves_.size());
1154 MoveOperands* move = moves_[index];
1155 codegen_->MoveLocation(move->GetDestination(), move->GetSource(), move->GetType());
1156}
1157
// Emits code exchanging the values of the two locations of the pending
// move at `index`, using TMP/AT (core) and FTMP (FPU) as scratch.
// Unsupported location combinations abort compilation.
void ParallelMoveResolverMIPS::EmitSwap(size_t index) {
  DCHECK_LT(index, moves_.size());
  MoveOperands* move = moves_[index];
  Primitive::Type type = move->GetType();
  Location loc1 = move->GetDestination();
  Location loc2 = move->GetSource();

  DCHECK(!loc1.IsConstant());
  DCHECK(!loc2.IsConstant());

  // Swapping a location with itself is a no-op.
  if (loc1.Equals(loc2)) {
    return;
  }

  if (loc1.IsRegister() && loc2.IsRegister()) {
    // Swap 2 GPRs.
    Register r1 = loc1.AsRegister<Register>();
    Register r2 = loc2.AsRegister<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if (loc1.IsFpuRegister() && loc2.IsFpuRegister()) {
    // Swap 2 FPRs, using single- or double-precision moves per `type`.
    FRegister f1 = loc1.AsFpuRegister<FRegister>();
    FRegister f2 = loc2.AsFpuRegister<FRegister>();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, f2);
      __ MovS(f2, f1);
      __ MovS(f1, FTMP);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, f2);
      __ MovD(f2, f1);
      __ MovD(f1, FTMP);
    }
  } else if ((loc1.IsRegister() && loc2.IsFpuRegister()) ||
             (loc1.IsFpuRegister() && loc2.IsRegister())) {
    // Swap FPR and GPR.
    DCHECK_EQ(type, Primitive::kPrimFloat);  // Can only swap a float.
    FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                        : loc2.AsFpuRegister<FRegister>();
    Register r2 = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
    __ Move(TMP, r2);
    __ Mfc1(r2, f1);
    __ Mtc1(TMP, f1);
  } else if (loc1.IsRegisterPair() && loc2.IsRegisterPair()) {
    // Swap 2 GPR register pairs, one half at a time.
    Register r1 = loc1.AsRegisterPairLow<Register>();
    Register r2 = loc2.AsRegisterPairLow<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
    r1 = loc1.AsRegisterPairHigh<Register>();
    r2 = loc2.AsRegisterPairHigh<Register>();
    __ Move(TMP, r2);
    __ Move(r2, r1);
    __ Move(r1, TMP);
  } else if ((loc1.IsRegisterPair() && loc2.IsFpuRegister()) ||
             (loc1.IsFpuRegister() && loc2.IsRegisterPair())) {
    // Swap FPR and GPR register pair.
    DCHECK_EQ(type, Primitive::kPrimDouble);
    FRegister f1 = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                        : loc2.AsFpuRegister<FRegister>();
    Register r2_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
                                          : loc2.AsRegisterPairLow<Register>();
    Register r2_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
                                          : loc2.AsRegisterPairHigh<Register>();
    // Use 2 temporary registers because we can't first swap the low 32 bits of an FPR and
    // then swap the high 32 bits of the same FPR. mtc1 makes the high 32 bits of an FPR
    // unpredictable and the following mfch1 will fail.
    __ Mfc1(TMP, f1);
    __ MoveFromFpuHigh(AT, f1);
    __ Mtc1(r2_l, f1);
    __ MoveToFpuHigh(r2_h, f1);
    __ Move(r2_l, TMP);
    __ Move(r2_h, AT);
  } else if (loc1.IsStackSlot() && loc2.IsStackSlot()) {
    // Swap two single-word stack slots.
    Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ false);
  } else if (loc1.IsDoubleStackSlot() && loc2.IsDoubleStackSlot()) {
    // Swap two double-word stack slots.
    Exchange(loc1.GetStackIndex(), loc2.GetStackIndex(), /* double_slot */ true);
  } else if ((loc1.IsRegister() && loc2.IsStackSlot()) ||
             (loc1.IsStackSlot() && loc2.IsRegister())) {
    // Swap GPR and single-word stack slot.
    Register reg = loc1.IsRegister() ? loc1.AsRegister<Register>() : loc2.AsRegister<Register>();
    intptr_t offset = loc1.IsStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
    __ Move(TMP, reg);
    __ LoadFromOffset(kLoadWord, reg, SP, offset);
    __ StoreToOffset(kStoreWord, TMP, SP, offset);
  } else if ((loc1.IsRegisterPair() && loc2.IsDoubleStackSlot()) ||
             (loc1.IsDoubleStackSlot() && loc2.IsRegisterPair())) {
    // Swap GPR register pair and double-word stack slot, one half at a time.
    Register reg_l = loc1.IsRegisterPair() ? loc1.AsRegisterPairLow<Register>()
                                           : loc2.AsRegisterPairLow<Register>();
    Register reg_h = loc1.IsRegisterPair() ? loc1.AsRegisterPairHigh<Register>()
                                           : loc2.AsRegisterPairHigh<Register>();
    intptr_t offset_l = loc1.IsDoubleStackSlot() ? loc1.GetStackIndex() : loc2.GetStackIndex();
    intptr_t offset_h = loc1.IsDoubleStackSlot() ? loc1.GetHighStackIndex(kMipsWordSize)
                                                 : loc2.GetHighStackIndex(kMipsWordSize);
    __ Move(TMP, reg_l);
    __ LoadFromOffset(kLoadWord, reg_l, SP, offset_l);
    __ StoreToOffset(kStoreWord, TMP, SP, offset_l);
    __ Move(TMP, reg_h);
    __ LoadFromOffset(kLoadWord, reg_h, SP, offset_h);
    __ StoreToOffset(kStoreWord, TMP, SP, offset_h);
  } else if (loc1.IsFpuRegister() || loc2.IsFpuRegister()) {
    // Swap FPR and stack slot. By elimination of the cases above, the
    // non-FPR location here is presumably a (double) stack slot of
    // matching width — any other pairing falls through to the fatal
    // branch below. TODO confirm with callers.
    FRegister reg = loc1.IsFpuRegister() ? loc1.AsFpuRegister<FRegister>()
                                         : loc2.AsFpuRegister<FRegister>();
    intptr_t offset = loc1.IsFpuRegister() ? loc2.GetStackIndex() : loc1.GetStackIndex();
    if (type == Primitive::kPrimFloat) {
      __ MovS(FTMP, reg);
      __ LoadSFromOffset(reg, SP, offset);
      __ StoreSToOffset(FTMP, SP, offset);
    } else {
      DCHECK_EQ(type, Primitive::kPrimDouble);
      __ MovD(FTMP, reg);
      __ LoadDFromOffset(reg, SP, offset);
      __ StoreDToOffset(FTMP, SP, offset);
    }
  } else {
    LOG(FATAL) << "Swap between " << loc1 << " and " << loc2 << " is unsupported";
  }
}
1277
1278void ParallelMoveResolverMIPS::RestoreScratch(int reg) {
1279 __ Pop(static_cast<Register>(reg));
1280}
1281
1282void ParallelMoveResolverMIPS::SpillScratch(int reg) {
1283 __ Push(static_cast<Register>(reg));
1284}
1285
// Emits code swapping the contents of two stack slots at SP-relative
// offsets `index1` and `index2` (two words each when `double_slot`),
// using TMP plus one additional scratch register.
void ParallelMoveResolverMIPS::Exchange(int index1, int index2, bool double_slot) {
  // Allocate a scratch register other than TMP, if available.
  // Else, spill V0 (arbitrary choice) and use it as a scratch register (it will be
  // automatically unspilled when the scratch scope object is destroyed).
  ScratchRegisterScope ensure_scratch(this, TMP, V0, codegen_->GetNumberOfCoreRegisters());
  // If V0 spills onto the stack, SP-relative offsets need to be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kMipsWordSize : 0;
  // One iteration per word: load both slots, then store them crossed over.
  for (int i = 0; i <= (double_slot ? 1 : 0); i++, stack_offset += kMipsWordSize) {
    __ LoadFromOffset(kLoadWord,
                      Register(ensure_scratch.GetRegister()),
                      SP,
                      index1 + stack_offset);
    __ LoadFromOffset(kLoadWord,
                      TMP,
                      SP,
                      index2 + stack_offset);
    __ StoreToOffset(kStoreWord,
                     Register(ensure_scratch.GetRegister()),
                     SP,
                     index2 + stack_offset);
    __ StoreToOffset(kStoreWord, TMP, SP, index1 + stack_offset);
  }
}
1309
Alexey Frunze73296a72016-06-03 22:51:46 -07001310void CodeGeneratorMIPS::ComputeSpillMask() {
1311 core_spill_mask_ = allocated_registers_.GetCoreRegisters() & core_callee_save_mask_;
1312 fpu_spill_mask_ = allocated_registers_.GetFloatingPointRegisters() & fpu_callee_save_mask_;
1313 DCHECK_NE(core_spill_mask_, 0u) << "At least the return address register must be saved";
1314 // If there're FPU callee-saved registers and there's an odd number of GPR callee-saved
1315 // registers, include the ZERO register to force alignment of FPU callee-saved registers
1316 // within the stack frame.
1317 if ((fpu_spill_mask_ != 0) && (POPCOUNT(core_spill_mask_) % 2 != 0)) {
1318 core_spill_mask_ |= (1 << ZERO);
1319 }
Alexey Frunze58320ce2016-08-30 21:40:46 -07001320}
1321
1322bool CodeGeneratorMIPS::HasAllocatedCalleeSaveRegisters() const {
Alexey Frunze06a46c42016-07-19 15:00:40 -07001323 // If RA is clobbered by PC-relative operations on R2 and it's the only spilled register
Alexey Frunze58320ce2016-08-30 21:40:46 -07001324 // (this can happen in leaf methods), force CodeGenerator::InitializeCodeGeneration()
1325 // into the path that creates a stack frame so that RA can be explicitly saved and restored.
1326 // RA can't otherwise be saved/restored when it's the only spilled register.
Alexey Frunze58320ce2016-08-30 21:40:46 -07001327 return CodeGenerator::HasAllocatedCalleeSaveRegisters() || clobbered_ra_;
Alexey Frunze73296a72016-06-03 22:51:46 -07001328}
1329
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001330static dwarf::Reg DWARFReg(Register reg) {
1331 return dwarf::Reg::MipsCore(static_cast<int>(reg));
1332}
1333
1334// TODO: mapping of floating-point registers to DWARF.
1335
// Emits the method prologue: optional stack-overflow probe, frame allocation,
// callee-save spills (with CFI records), the ArtMethod* store at SP+0, and the
// "should deoptimize" flag initialization. Empty-frame methods emit nothing
// beyond the probe.
void CodeGeneratorMIPS::GenerateFrameEntry() {
  __ Bind(&frame_entry_label_);

  bool do_overflow_check = FrameNeedsStackCheck(GetFrameSize(), kMips) || !IsLeafMethod();

  if (do_overflow_check) {
    // Probe the lowest address the frame may touch; a load through an unmapped
    // guard page faults here, before the frame is set up.
    __ LoadFromOffset(kLoadWord,
                      ZERO,
                      SP,
                      -static_cast<int32_t>(GetStackOverflowReservedBytes(kMips)));
    // Record a stack map for the implicit null/overflow check at this PC.
    RecordPcInfo(nullptr, 0);
  }

  if (HasEmptyFrame()) {
    // An empty frame may only "spill" RA, must not use FPU saves, and must not
    // have had RA clobbered by PC-relative sequences (see
    // HasAllocatedCalleeSaveRegisters()).
    CHECK_EQ(fpu_spill_mask_, 0u);
    CHECK_EQ(core_spill_mask_, 1u << RA);
    CHECK(!clobbered_ra_);
    return;
  }

  // Make sure the frame size isn't unreasonably large.
  if (GetFrameSize() > GetStackOverflowReservedBytes(kMips)) {
    LOG(FATAL) << "Stack frame larger than " << GetStackOverflowReservedBytes(kMips) << " bytes";
  }

  // Spill callee-saved registers.

  uint32_t ofs = GetFrameSize();
  __ IncreaseFrameSize(ofs);

  // Core registers are stored top-down from the end of the frame, highest
  // register number first (so RA lands at the top of the frame).
  for (uint32_t mask = core_spill_mask_; mask != 0; ) {
    Register reg = static_cast<Register>(MostSignificantBit(mask));
    mask ^= 1u << reg;
    ofs -= kMipsWordSize;
    // The ZERO register is only included for alignment.
    if (reg != ZERO) {
      __ StoreToOffset(kStoreWord, reg, SP, ofs);
      __ cfi().RelOffset(DWARFReg(reg), ofs);
    }
  }

  // FPU callee-saves follow below the core saves, one doubleword each.
  for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
    FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
    mask ^= 1u << reg;
    ofs -= kMipsDoublewordSize;
    __ StoreDToOffset(reg, SP, ofs);
    // TODO: __ cfi().RelOffset(DWARFReg(reg), ofs);
  }

  // Save the current method if we need it. Note that we do not
  // do this in HCurrentMethod, as the instruction might have been removed
  // in the SSA graph.
  if (RequiresCurrentMethod()) {
    // A0 (kMethodRegisterArgument) holds the ArtMethod* on entry.
    __ StoreToOffset(kStoreWord, kMethodRegisterArgument, SP, kCurrentMethodStackOffset);
  }

  if (GetGraph()->HasShouldDeoptimizeFlag()) {
    // Initialize should deoptimize flag to 0.
    __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag());
  }
}
1397
// Emits the method epilogue: callee-save restores (mirroring GenerateFrameEntry),
// frame deallocation and the return jump, bracketed by CFI state save/restore so
// unwind info after the return site reverts to the in-frame state.
void CodeGeneratorMIPS::GenerateFrameExit() {
  __ cfi().RememberState();

  if (!HasEmptyFrame()) {
    // Restore callee-saved registers.

    // For better instruction scheduling restore RA before other registers.
    uint32_t ofs = GetFrameSize();
    for (uint32_t mask = core_spill_mask_; mask != 0; ) {
      Register reg = static_cast<Register>(MostSignificantBit(mask));
      mask ^= 1u << reg;
      ofs -= kMipsWordSize;
      // The ZERO register is only included for alignment.
      if (reg != ZERO) {
        __ LoadFromOffset(kLoadWord, reg, SP, ofs);
        __ cfi().Restore(DWARFReg(reg));
      }
    }

    for (uint32_t mask = fpu_spill_mask_; mask != 0; ) {
      FRegister reg = static_cast<FRegister>(MostSignificantBit(mask));
      mask ^= 1u << reg;
      ofs -= kMipsDoublewordSize;
      __ LoadDFromOffset(reg, SP, ofs);
      // TODO: __ cfi().Restore(DWARFReg(reg));
    }

    size_t frame_size = GetFrameSize();
    // Adjust the stack pointer in the delay slot if doing so doesn't break CFI.
    // A small frame can be torn down by a single ADDIU, which fits in the
    // branch delay slot of the JR; larger frames need the multi-instruction
    // DecreaseFrameSize before the jump, plus an explicit NOP in the slot.
    bool exchange = IsInt<16>(static_cast<int32_t>(frame_size));
    // Disable assembler reordering so we control delay-slot placement here.
    bool reordering = __ SetReorder(false);
    if (exchange) {
      __ Jr(RA);
      __ DecreaseFrameSize(frame_size);  // Single instruction in delay slot.
    } else {
      __ DecreaseFrameSize(frame_size);
      __ Jr(RA);
      __ Nop();  // In delay slot.
    }
    __ SetReorder(reordering);
  } else {
    // No frame to tear down; just return.
    __ Jr(RA);
    __ NopIfNoReordering();
  }

  __ cfi().RestoreState();
  __ cfi().DefCFAOffset(GetFrameSize());
}
1446
1447void CodeGeneratorMIPS::Bind(HBasicBlock* block) {
1448 __ Bind(GetLabelOf(block));
1449}
1450
Lena Djokicca8c2952017-05-29 11:31:46 +02001451VectorRegister VectorRegisterFrom(Location location) {
1452 DCHECK(location.IsFpuRegister());
1453 return static_cast<VectorRegister>(location.AsFpuRegister<FRegister>());
1454}
1455
// Emits code to move a value from `source` to `destination`, covering every
// supported pairing of register / register-pair / FPU register / (SIMD,
// single, double) stack slot / constant. `dst_type` disambiguates 32- vs
// 64-bit FPU moves. No-op when source and destination are the same location.
void CodeGeneratorMIPS::MoveLocation(Location destination,
                                     Location source,
                                     Primitive::Type dst_type) {
  if (source.Equals(destination)) {
    return;
  }

  if (source.IsConstant()) {
    // Constants have their own materialization paths.
    MoveConstant(destination, source.GetConstant());
  } else {
    if (destination.IsRegister()) {
      // 32-bit value into a single core register.
      if (source.IsRegister()) {
        __ Move(destination.AsRegister<Register>(), source.AsRegister<Register>());
      } else if (source.IsFpuRegister()) {
        __ Mfc1(destination.AsRegister<Register>(), source.AsFpuRegister<FRegister>());
      } else {
        DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
        __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
      }
    } else if (destination.IsRegisterPair()) {
      // 64-bit value into a lo/hi core register pair.
      if (source.IsRegisterPair()) {
        __ Move(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
        __ Move(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      } else if (source.IsFpuRegister()) {
        Register dst_high = destination.AsRegisterPairHigh<Register>();
        Register dst_low = destination.AsRegisterPairLow<Register>();
        FRegister src = source.AsFpuRegister<FRegister>();
        __ Mfc1(dst_low, src);
        __ MoveFromFpuHigh(dst_high, src);
      } else {
        DCHECK(source.IsDoubleStackSlot())
            << "Cannot move from " << source << " to " << destination;
        int32_t off = source.GetStackIndex();
        Register r = destination.AsRegisterPairLow<Register>();
        // kLoadDoubleword into a pair fills both registers of the pair.
        __ LoadFromOffset(kLoadDoubleword, r, SP, off);
      }
    } else if (destination.IsFpuRegister()) {
      if (source.IsRegister()) {
        DCHECK(!Primitive::Is64BitType(dst_type));
        __ Mtc1(source.AsRegister<Register>(), destination.AsFpuRegister<FRegister>());
      } else if (source.IsRegisterPair()) {
        DCHECK(Primitive::Is64BitType(dst_type));
        FRegister dst = destination.AsFpuRegister<FRegister>();
        Register src_high = source.AsRegisterPairHigh<Register>();
        Register src_low = source.AsRegisterPairLow<Register>();
        __ Mtc1(src_low, dst);
        __ MoveToFpuHigh(src_high, dst);
      } else if (source.IsFpuRegister()) {
        if (GetGraph()->HasSIMD()) {
          // With SIMD, FPU registers may hold 128-bit vectors; move the whole
          // overlapping MSA register.
          __ MoveV(VectorRegisterFrom(destination),
                   VectorRegisterFrom(source));
        } else {
          if (Primitive::Is64BitType(dst_type)) {
            __ MovD(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
          } else {
            DCHECK_EQ(dst_type, Primitive::kPrimFloat);
            __ MovS(destination.AsFpuRegister<FRegister>(), source.AsFpuRegister<FRegister>());
          }
        }
      } else if (source.IsSIMDStackSlot()) {
        __ LoadQFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
      } else if (source.IsDoubleStackSlot()) {
        DCHECK(Primitive::Is64BitType(dst_type));
        __ LoadDFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
      } else {
        DCHECK(!Primitive::Is64BitType(dst_type));
        DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
        __ LoadSFromOffset(destination.AsFpuRegister<FRegister>(), SP, source.GetStackIndex());
      }
    } else if (destination.IsSIMDStackSlot()) {
      // 128-bit spill slot; slot-to-slot goes through FTMP.
      if (source.IsFpuRegister()) {
        __ StoreQToOffset(source.AsFpuRegister<FRegister>(), SP, destination.GetStackIndex());
      } else {
        DCHECK(source.IsSIMDStackSlot());
        __ LoadQFromOffset(FTMP, SP, source.GetStackIndex());
        __ StoreQToOffset(FTMP, SP, destination.GetStackIndex());
      }
    } else if (destination.IsDoubleStackSlot()) {
      int32_t dst_offset = destination.GetStackIndex();
      if (source.IsRegisterPair()) {
        __ StoreToOffset(kStoreDoubleword, source.AsRegisterPairLow<Register>(), SP, dst_offset);
      } else if (source.IsFpuRegister()) {
        __ StoreDToOffset(source.AsFpuRegister<FRegister>(), SP, dst_offset);
      } else {
        DCHECK(source.IsDoubleStackSlot())
            << "Cannot move from " << source << " to " << destination;
        // Slot-to-slot: copy the two 32-bit words through TMP.
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, dst_offset);
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex() + 4);
        __ StoreToOffset(kStoreWord, TMP, SP, dst_offset + 4);
      }
    } else {
      DCHECK(destination.IsStackSlot()) << destination;
      int32_t dst_offset = destination.GetStackIndex();
      if (source.IsRegister()) {
        __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, dst_offset);
      } else if (source.IsFpuRegister()) {
        __ StoreSToOffset(source.AsFpuRegister<FRegister>(), SP, dst_offset);
      } else {
        DCHECK(source.IsStackSlot()) << "Cannot move from " << source << " to " << destination;
        // Slot-to-slot 32-bit copy through TMP.
        __ LoadFromOffset(kLoadWord, TMP, SP, source.GetStackIndex());
        __ StoreToOffset(kStoreWord, TMP, SP, dst_offset);
      }
    }
  }
}
1562
1563void CodeGeneratorMIPS::MoveConstant(Location destination, HConstant* c) {
1564 if (c->IsIntConstant() || c->IsNullConstant()) {
1565 // Move 32 bit constant.
1566 int32_t value = GetInt32ValueOf(c);
1567 if (destination.IsRegister()) {
1568 Register dst = destination.AsRegister<Register>();
1569 __ LoadConst32(dst, value);
1570 } else {
1571 DCHECK(destination.IsStackSlot())
1572 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001573 __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001574 }
1575 } else if (c->IsLongConstant()) {
1576 // Move 64 bit constant.
1577 int64_t value = GetInt64ValueOf(c);
1578 if (destination.IsRegisterPair()) {
1579 Register r_h = destination.AsRegisterPairHigh<Register>();
1580 Register r_l = destination.AsRegisterPairLow<Register>();
1581 __ LoadConst64(r_h, r_l, value);
1582 } else {
1583 DCHECK(destination.IsDoubleStackSlot())
1584 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001585 __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001586 }
1587 } else if (c->IsFloatConstant()) {
1588 // Move 32 bit float constant.
1589 int32_t value = GetInt32ValueOf(c);
1590 if (destination.IsFpuRegister()) {
1591 __ LoadSConst32(destination.AsFpuRegister<FRegister>(), value, TMP);
1592 } else {
1593 DCHECK(destination.IsStackSlot())
1594 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001595 __ StoreConstToOffset(kStoreWord, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001596 }
1597 } else {
1598 // Move 64 bit double constant.
1599 DCHECK(c->IsDoubleConstant()) << c->DebugName();
1600 int64_t value = GetInt64ValueOf(c);
1601 if (destination.IsFpuRegister()) {
1602 FRegister fd = destination.AsFpuRegister<FRegister>();
1603 __ LoadDConst64(fd, value, TMP);
1604 } else {
1605 DCHECK(destination.IsDoubleStackSlot())
1606 << "Cannot move " << c->DebugName() << " to " << destination;
Alexey Frunzef58b2482016-09-02 22:14:06 -07001607 __ StoreConstToOffset(kStoreDoubleword, value, SP, destination.GetStackIndex(), TMP);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001608 }
1609 }
1610}
1611
1612void CodeGeneratorMIPS::MoveConstant(Location destination, int32_t value) {
1613 DCHECK(destination.IsRegister());
1614 Register dst = destination.AsRegister<Register>();
1615 __ LoadConst32(dst, value);
1616}
1617
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001618void CodeGeneratorMIPS::AddLocationAsTemp(Location location, LocationSummary* locations) {
1619 if (location.IsRegister()) {
1620 locations->AddTemp(location);
Alexey Frunzec9e94f32015-10-26 16:11:39 -07001621 } else if (location.IsRegisterPair()) {
1622 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairLow<Register>()));
1623 locations->AddTemp(Location::RegisterLocation(location.AsRegisterPairHigh<Register>()));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001624 } else {
1625 UNIMPLEMENTED(FATAL) << "AddLocationAsTemp not implemented for location " << location;
1626 }
1627}
1628
// Converts every recorded PC-relative patch in `infos` into a LinkerPatch
// created by `Factory`, appending them to `linker_patches`. The patch's PC
// base is the bound pc_rel_label of the high-half patch (R6 AUIPC / R2 NAL
// sequences) or, failing that, the assembler's global PC-relative base label.
template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorMIPS::EmitPcRelativeLinkerPatches(
    const ArenaDeque<PcRelativePatchInfo>& infos,
    ArenaVector<LinkerPatch>* linker_patches) {
  for (const PcRelativePatchInfo& info : infos) {
    const DexFile& dex_file = info.target_dex_file;
    size_t offset_or_index = info.offset_or_index;
    DCHECK(info.label.IsBound());
    // Code offset of the instruction carrying the placeholder immediate.
    uint32_t literal_offset = __ GetLabelLocation(&info.label);
    // On R2 we use HMipsComputeBaseMethodAddress and patch relative to
    // the assembler's base label used for PC-relative addressing.
    const PcRelativePatchInfo& info_high = info.patch_info_high ? *info.patch_info_high : info;
    uint32_t pc_rel_offset = info_high.pc_rel_label.IsBound()
        ? __ GetLabelLocation(&info_high.pc_rel_label)
        : __ GetPcRelBaseLabelLocation();
    linker_patches->push_back(Factory(literal_offset, &dex_file, pc_rel_offset, offset_or_index));
  }
}
1647
// Collects all linker patches recorded during code generation into
// `linker_patches`. Boot-image compilations emit relative method/type/string
// patches; other compilations emit string intern-table patches instead (and
// must have recorded no boot-image method/type patches). The .bss entry
// patches are emitted in both modes.
void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
  DCHECK(linker_patches->empty());
  // Pre-size the output; the final DCHECK verifies every patch was emitted.
  size_t size =
      pc_relative_method_patches_.size() +
      method_bss_entry_patches_.size() +
      pc_relative_type_patches_.size() +
      type_bss_entry_patches_.size() +
      pc_relative_string_patches_.size() +
      string_bss_entry_patches_.size();
  linker_patches->reserve(size);
  if (GetCompilerOptions().IsBootImage()) {
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
                                                                  linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
                                                                linker_patches);
    EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
                                                                  linker_patches);
  } else {
    DCHECK(pc_relative_method_patches_.empty());
    DCHECK(pc_relative_type_patches_.empty());
    EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
                                                                     linker_patches);
  }
  EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
                                                                linker_patches);
  EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
                                                              linker_patches);
  EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
                                                                linker_patches);
  DCHECK_EQ(size, linker_patches->size());
}
1679
Vladimir Marko65979462017-05-19 17:25:12 +01001680CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeMethodPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001681 MethodReference target_method,
1682 const PcRelativePatchInfo* info_high) {
Vladimir Marko65979462017-05-19 17:25:12 +01001683 return NewPcRelativePatch(*target_method.dex_file,
1684 target_method.dex_method_index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001685 info_high,
Vladimir Marko65979462017-05-19 17:25:12 +01001686 &pc_relative_method_patches_);
Alexey Frunze06a46c42016-07-19 15:00:40 -07001687}
1688
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001689CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewMethodBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001690 MethodReference target_method,
1691 const PcRelativePatchInfo* info_high) {
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001692 return NewPcRelativePatch(*target_method.dex_file,
1693 target_method.dex_method_index,
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001694 info_high,
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001695 &method_bss_entry_patches_);
1696}
1697
Alexey Frunze06a46c42016-07-19 15:00:40 -07001698CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeTypePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001699 const DexFile& dex_file,
1700 dex::TypeIndex type_index,
1701 const PcRelativePatchInfo* info_high) {
1702 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &pc_relative_type_patches_);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001703}
1704
Vladimir Marko1998cd02017-01-13 13:02:58 +00001705CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewTypeBssEntryPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001706 const DexFile& dex_file,
1707 dex::TypeIndex type_index,
1708 const PcRelativePatchInfo* info_high) {
1709 return NewPcRelativePatch(dex_file, type_index.index_, info_high, &type_bss_entry_patches_);
Vladimir Marko1998cd02017-01-13 13:02:58 +00001710}
1711
Vladimir Marko65979462017-05-19 17:25:12 +01001712CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeStringPatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001713 const DexFile& dex_file,
1714 dex::StringIndex string_index,
1715 const PcRelativePatchInfo* info_high) {
1716 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
Vladimir Marko65979462017-05-19 17:25:12 +01001717}
1718
Vladimir Marko6cfbdbc2017-07-25 13:26:39 +01001719CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewStringBssEntryPatch(
1720 const DexFile& dex_file,
1721 dex::StringIndex string_index,
1722 const PcRelativePatchInfo* info_high) {
1723 return NewPcRelativePatch(dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
1724}
1725
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001726CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativePatch(
Alexey Frunze5fa5c042017-06-01 21:07:52 -07001727 const DexFile& dex_file,
1728 uint32_t offset_or_index,
1729 const PcRelativePatchInfo* info_high,
1730 ArenaDeque<PcRelativePatchInfo>* patches) {
1731 patches->emplace_back(dex_file, offset_or_index, info_high);
Alexey Frunzee3fb2452016-05-10 16:08:05 -07001732 return &patches->back();
1733}
1734
Alexey Frunze06a46c42016-07-19 15:00:40 -07001735Literal* CodeGeneratorMIPS::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
1736 return map->GetOrCreate(
1737 value,
1738 [this, value]() { return __ NewLiteral<uint32_t>(value); });
1739}
1740
Alexey Frunze06a46c42016-07-19 15:00:40 -07001741Literal* CodeGeneratorMIPS::DeduplicateBootImageAddressLiteral(uint32_t address) {
Richard Uhlerc52f3032017-03-02 13:45:45 +00001742 return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), &uint32_literals_);
Alexey Frunze06a46c42016-07-19 15:00:40 -07001743}
1744
// Emits the high half of a patchable PC-relative address into `out`, binding
// the patch labels in `info_high`. On R6 this is a single AUIPC; on R2 it is
// LUI + ADDU against either `base` (a HMipsComputeBaseMethodAddress register)
// or RA obtained via NAL when `base` is ZERO. The 0x1234 immediates are
// placeholders rewritten by the linker (see EmitPcRelativeLinkerPatches).
void CodeGeneratorMIPS::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
                                                             Register out,
                                                             Register base) {
  // Only a high-half patch record may be passed here.
  DCHECK(!info_high->patch_info_high);
  DCHECK_NE(out, base);
  // Keep the exact instruction sequence; label positions must match the
  // emitted instructions, so assembler reordering is disabled.
  bool reordering = __ SetReorder(false);
  if (GetInstructionSetFeatures().IsR6()) {
    DCHECK_EQ(base, ZERO);
    __ Bind(&info_high->label);
    __ Bind(&info_high->pc_rel_label);
    // Add the high half of a 32-bit offset to PC.
    __ Auipc(out, /* placeholder */ 0x1234);
    __ SetReorder(reordering);
  } else {
    // If base is ZERO, emit NAL to obtain the actual base.
    if (base == ZERO) {
      // Generate a dummy PC-relative call to obtain PC.
      __ Nal();
    }
    __ Bind(&info_high->label);
    __ Lui(out, /* placeholder */ 0x1234);
    // If we emitted the NAL, bind the pc_rel_label, otherwise base is a register holding
    // the HMipsComputeBaseMethodAddress which has its own label stored in MipsAssembler.
    if (base == ZERO) {
      __ Bind(&info_high->pc_rel_label);
    }
    __ SetReorder(reordering);
    // Add the high half of a 32-bit offset to PC.
    __ Addu(out, out, (base == ZERO) ? RA : base);
  }
  // A following instruction will add the sign-extended low half of the 32-bit
  // offset to `out` (e.g. lw, jialc, addiu).
}
1778
Alexey Frunze627c1a02017-01-30 19:28:14 -08001779CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootStringPatch(
1780 const DexFile& dex_file,
1781 dex::StringIndex dex_index,
1782 Handle<mirror::String> handle) {
1783 jit_string_roots_.Overwrite(StringReference(&dex_file, dex_index),
1784 reinterpret_cast64<uint64_t>(handle.GetReference()));
1785 jit_string_patches_.emplace_back(dex_file, dex_index.index_);
1786 return &jit_string_patches_.back();
1787}
1788
1789CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootClassPatch(
1790 const DexFile& dex_file,
1791 dex::TypeIndex dex_index,
1792 Handle<mirror::Class> handle) {
1793 jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index),
1794 reinterpret_cast64<uint64_t>(handle.GetReference()));
1795 jit_class_patches_.emplace_back(dex_file, dex_index.index_);
1796 return &jit_class_patches_.back();
1797}
1798
// Rewrites the placeholder LUI/low-half instruction pair recorded in `info` so
// the generated code addresses entry `index_in_table` of the JIT root table at
// `roots_data`. The DCHECKs verify the little-endian encodings still carry the
// 0x1234/0x5678 placeholder immediates before patching the bytes in place.
void CodeGeneratorMIPS::PatchJitRootUse(uint8_t* code,
                                        const uint8_t* roots_data,
                                        const CodeGeneratorMIPS::JitPatchInfo& info,
                                        uint64_t index_in_table) const {
  // Code offsets of the two placeholder instructions.
  uint32_t high_literal_offset = GetAssembler().GetLabelLocation(&info.high_label);
  uint32_t low_literal_offset = GetAssembler().GetLabelLocation(&info.low_label);
  // Absolute address of the root-table slot.
  uintptr_t address =
      reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
  uint32_t addr32 = dchecked_integral_cast<uint32_t>(address);
  // lui reg, addr32_high
  DCHECK_EQ(code[high_literal_offset + 0], 0x34);
  DCHECK_EQ(code[high_literal_offset + 1], 0x12);
  DCHECK_EQ((code[high_literal_offset + 2] & 0xE0), 0x00);
  DCHECK_EQ(code[high_literal_offset + 3], 0x3C);
  // instr reg, reg, addr32_low
  DCHECK_EQ(code[low_literal_offset + 0], 0x78);
  DCHECK_EQ(code[low_literal_offset + 1], 0x56);
  addr32 += (addr32 & 0x8000) << 1;  // Account for sign extension in "instr reg, reg, addr32_low".
  // lui reg, addr32_high
  code[high_literal_offset + 0] = static_cast<uint8_t>(addr32 >> 16);
  code[high_literal_offset + 1] = static_cast<uint8_t>(addr32 >> 24);
  // instr reg, reg, addr32_low
  code[low_literal_offset + 0] = static_cast<uint8_t>(addr32 >> 0);
  code[low_literal_offset + 1] = static_cast<uint8_t>(addr32 >> 8);
}
1824
1825void CodeGeneratorMIPS::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
1826 for (const JitPatchInfo& info : jit_string_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001827 const auto it = jit_string_roots_.find(StringReference(&info.target_dex_file,
1828 dex::StringIndex(info.index)));
Alexey Frunze627c1a02017-01-30 19:28:14 -08001829 DCHECK(it != jit_string_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001830 uint64_t index_in_table = it->second;
1831 PatchJitRootUse(code, roots_data, info, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001832 }
1833 for (const JitPatchInfo& info : jit_class_patches_) {
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001834 const auto it = jit_class_roots_.find(TypeReference(&info.target_dex_file,
1835 dex::TypeIndex(info.index)));
Alexey Frunze627c1a02017-01-30 19:28:14 -08001836 DCHECK(it != jit_class_roots_.end());
Vladimir Marko7d157fc2017-05-10 16:29:23 +01001837 uint64_t index_in_table = it->second;
1838 PatchJitRootUse(code, roots_data, info, index_in_table);
Alexey Frunze627c1a02017-01-30 19:28:14 -08001839 }
1840}
1841
// Emits the GC write-barrier card-marking sequence for a reference store into
// `object`: loads the card-table base from the thread, computes the card for
// `object`'s address, and dirties it. When `value_can_be_null`, the marking is
// skipped for a null `value`. Clobbers AT and TMP.
void CodeGeneratorMIPS::MarkGCCard(Register object,
                                   Register value,
                                   bool value_can_be_null) {
  MipsLabel done;
  Register card = AT;
  Register temp = TMP;
  if (value_can_be_null) {
    // Null stores don't need a card mark.
    __ Beqz(value, &done);
  }
  // card = Thread::card_table_ (the card-table biased base).
  __ LoadFromOffset(kLoadWord,
                    card,
                    TR,
                    Thread::CardTableOffset<kMipsPointerSize>().Int32Value());
  // temp = card + (object >> kCardShift); the store below dirties that card.
  __ Srl(temp, object, gc::accounting::CardTable::kCardShift);
  __ Addu(temp, card, temp);
  // The value stored is the card-table base's low byte (the dirty marker).
  __ Sb(card, temp, 0);
  if (value_can_be_null) {
    __ Bind(&done);
  }
}
1862
David Brazdil58282f42016-01-14 12:45:10 +00001863void CodeGeneratorMIPS::SetupBlockedRegisters() const {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001864 // ZERO, K0, K1, GP, SP, RA are always reserved and can't be allocated.
1865 blocked_core_registers_[ZERO] = true;
1866 blocked_core_registers_[K0] = true;
1867 blocked_core_registers_[K1] = true;
1868 blocked_core_registers_[GP] = true;
1869 blocked_core_registers_[SP] = true;
1870 blocked_core_registers_[RA] = true;
1871
1872 // AT and TMP(T8) are used as temporary/scratch registers
1873 // (similar to how AT is used by MIPS assemblers).
1874 blocked_core_registers_[AT] = true;
1875 blocked_core_registers_[TMP] = true;
1876 blocked_fpu_registers_[FTMP] = true;
1877
1878 // Reserve suspend and thread registers.
1879 blocked_core_registers_[S0] = true;
1880 blocked_core_registers_[TR] = true;
1881
1882 // Reserve T9 for function calls
1883 blocked_core_registers_[T9] = true;
1884
1885 // Reserve odd-numbered FPU registers.
1886 for (size_t i = 1; i < kNumberOfFRegisters; i += 2) {
1887 blocked_fpu_registers_[i] = true;
1888 }
1889
Goran Jakovljeviccdd822f2016-07-22 09:46:43 +02001890 if (GetGraph()->IsDebuggable()) {
1891 // Stubs do not save callee-save floating point registers. If the graph
1892 // is debuggable, we need to deal with these registers differently. For
1893 // now, just block them.
1894 for (size_t i = 0; i < arraysize(kFpuCalleeSaves); ++i) {
1895 blocked_fpu_registers_[kFpuCalleeSaves[i]] = true;
1896 }
1897 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001898}
1899
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001900size_t CodeGeneratorMIPS::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
1901 __ StoreToOffset(kStoreWord, Register(reg_id), SP, stack_index);
1902 return kMipsWordSize;
1903}
1904
1905size_t CodeGeneratorMIPS::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
1906 __ LoadFromOffset(kLoadWord, Register(reg_id), SP, stack_index);
1907 return kMipsWordSize;
1908}
1909
1910size_t CodeGeneratorMIPS::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001911 if (GetGraph()->HasSIMD()) {
1912 __ StoreQToOffset(FRegister(reg_id), SP, stack_index);
1913 } else {
1914 __ StoreDToOffset(FRegister(reg_id), SP, stack_index);
1915 }
1916 return GetFloatingPointSpillSlotSize();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001917}
1918
1919size_t CodeGeneratorMIPS::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
Lena Djokicca8c2952017-05-29 11:31:46 +02001920 if (GetGraph()->HasSIMD()) {
1921 __ LoadQFromOffset(FRegister(reg_id), SP, stack_index);
1922 } else {
1923 __ LoadDFromOffset(FRegister(reg_id), SP, stack_index);
1924 }
1925 return GetFloatingPointSpillSlotSize();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001926}
1927
// Writes the symbolic name of core register `reg` to `stream` (debug dumps).
void CodeGeneratorMIPS::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << Register(reg);
}
1931
// Writes the symbolic name of FPU register `reg` to `stream` (debug dumps).
void CodeGeneratorMIPS::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << FRegister(reg);
}
1935
// Stack space (4 words, for $a0-$a3) reserved around calls to entrypoints
// that directly reference native implementations; see GenerateInvokeRuntime.
constexpr size_t kMipsDirectEntrypointRuntimeOffset = 16;
1937
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02001938void CodeGeneratorMIPS::InvokeRuntime(QuickEntrypointEnum entrypoint,
1939 HInstruction* instruction,
1940 uint32_t dex_pc,
1941 SlowPathCode* slow_path) {
Alexandre Rames91a65162016-09-19 13:54:30 +01001942 ValidateInvokeRuntime(entrypoint, instruction, slow_path);
Alexey Frunze15958152017-02-09 19:08:30 -08001943 GenerateInvokeRuntime(GetThreadOffset<kMipsPointerSize>(entrypoint).Int32Value(),
1944 IsDirectEntrypoint(entrypoint));
1945 if (EntrypointRequiresStackMap(entrypoint)) {
1946 RecordPcInfo(instruction, dex_pc, slow_path);
1947 }
1948}
1949
// Emits a runtime call identified by a raw Thread offset without recording
// a stack map; callers must ensure no stack map is needed at this site.
// `direct` selects the direct-entrypoint calling convention (see
// GenerateInvokeRuntime).
void CodeGeneratorMIPS::InvokeRuntimeWithoutRecordingPcInfo(int32_t entry_point_offset,
                                                            HInstruction* instruction,
                                                            SlowPathCode* slow_path,
                                                            bool direct) {
  ValidateInvokeRuntimeWithoutRecordingPcInfo(instruction, slow_path);
  GenerateInvokeRuntime(entry_point_offset, direct);
}
1957
// Emits the actual runtime call: loads the entrypoint address from the
// Thread register (TR) and calls through T9. Assembler branch reordering is
// disabled so the Jalr delay slot below is filled explicitly.
void CodeGeneratorMIPS::GenerateInvokeRuntime(int32_t entry_point_offset, bool direct) {
  bool reordering = __ SetReorder(false);
  __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
  __ Jalr(T9);
  if (direct) {
    // Reserve argument space on stack (for $a0-$a3) for
    // entrypoints that directly reference native implementations.
    // Called function may use this space to store $a0-$a3 regs.
    __ IncreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);  // Single instruction in delay slot.
    __ DecreaseFrameSize(kMipsDirectEntrypointRuntimeOffset);
  } else {
    __ Nop();  // In delay slot.
  }
  __ SetReorder(reordering);
}
1973
// Emits a fast-path check that the class in `class_reg` is initialized;
// branches to `slow_path` when its status is below kStatusInitialized.
void InstructionCodeGeneratorMIPS::GenerateClassInitializationCheck(SlowPathCodeMIPS* slow_path,
                                                                    Register class_reg) {
  __ LoadFromOffset(kLoadWord, TMP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ LoadConst32(AT, mirror::Class::kStatusInitialized);
  // Any status less than kStatusInitialized means the class is not ready.
  __ Blt(TMP, AT, slow_path->GetEntryLabel());
  // Even if the initialized flag is set, we need to ensure consistent memory ordering.
  __ Sync(0);
  __ Bind(slow_path->GetExitLabel());
}
1983
// Emits a memory barrier. Every HMemoryBarrier kind maps to the same SYNC
// instruction here, since only stype 0 is supported.
void InstructionCodeGeneratorMIPS::GenerateMemoryBarrier(MemBarrierKind kind ATTRIBUTE_UNUSED) {
  __ Sync(0);  // Only stype 0 is supported.
}
1987
// Emits a suspend check: tests the thread-flags halfword in the Thread
// object and enters the suspend slow path when any flag is set. When
// `successor` is non-null the check sits on a back edge and falls through
// to that block on the fast path.
void InstructionCodeGeneratorMIPS::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                        HBasicBlock* successor) {
  SuspendCheckSlowPathMIPS* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathMIPS(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(kLoadUnsignedHalfword,
                    TMP,
                    TR,
                    Thread::ThreadFlagsOffset<kMipsPointerSize>().Int32Value());
  if (successor == nullptr) {
    // No explicit successor: take the slow path when flags are set and
    // resume right here afterwards.
    __ Bnez(TMP, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Fast path jumps straight to the successor block; otherwise fall into
    // the slow path.
    __ Beqz(TMP, codegen_->GetLabelOf(successor));
    __ B(slow_path->GetEntryLabel());
    // slow_path will return to GetLabelOf(successor).
  }
}
2007
// Instruction visitor that emits MIPS code for `graph`; caches the
// assembler owned by `codegen` for instruction emission.
InstructionCodeGeneratorMIPS::InstructionCodeGeneratorMIPS(HGraph* graph,
                                                           CodeGeneratorMIPS* codegen)
      : InstructionCodeGenerator(graph, codegen),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
2013
2014void LocationsBuilderMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
2015 DCHECK_EQ(instruction->InputCount(), 2U);
2016 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2017 Primitive::Type type = instruction->GetResultType();
2018 switch (type) {
2019 case Primitive::kPrimInt: {
2020 locations->SetInAt(0, Location::RequiresRegister());
2021 HInstruction* right = instruction->InputAt(1);
2022 bool can_use_imm = false;
2023 if (right->IsConstant()) {
2024 int32_t imm = CodeGenerator::GetInt32ValueOf(right->AsConstant());
2025 if (instruction->IsAnd() || instruction->IsOr() || instruction->IsXor()) {
2026 can_use_imm = IsUint<16>(imm);
2027 } else if (instruction->IsAdd()) {
2028 can_use_imm = IsInt<16>(imm);
2029 } else {
2030 DCHECK(instruction->IsSub());
2031 can_use_imm = IsInt<16>(-imm);
2032 }
2033 }
2034 if (can_use_imm)
2035 locations->SetInAt(1, Location::ConstantLocation(right->AsConstant()));
2036 else
2037 locations->SetInAt(1, Location::RequiresRegister());
2038 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2039 break;
2040 }
2041
2042 case Primitive::kPrimLong: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002043 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002044 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2045 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002046 break;
2047 }
2048
2049 case Primitive::kPrimFloat:
2050 case Primitive::kPrimDouble:
2051 DCHECK(instruction->IsAdd() || instruction->IsSub());
2052 locations->SetInAt(0, Location::RequiresFpuRegister());
2053 locations->SetInAt(1, Location::RequiresFpuRegister());
2054 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
2055 break;
2056
2057 default:
2058 LOG(FATAL) << "Unexpected " << instruction->DebugName() << " type " << type;
2059 }
2060}
2061
// Emits code for add/sub/and/or/xor on int, long, float and double.
// Longs live in register pairs; the carry/borrow between the halves is
// computed in TMP (register-register case) or AT (register-constant case).
// Constant operands use immediate instruction forms when the constant
// (half) fits the 16-bit encoding.
void InstructionCodeGeneratorMIPS::HandleBinaryOp(HBinaryOperation* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Location rhs_location = locations->InAt(1);

      Register rhs_reg = ZERO;
      int32_t rhs_imm = 0;
      bool use_imm = rhs_location.IsConstant();
      if (use_imm) {
        rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
      } else {
        rhs_reg = rhs_location.AsRegister<Register>();
      }

      if (instruction->IsAnd()) {
        if (use_imm)
          __ Andi(dst, lhs, rhs_imm);
        else
          __ And(dst, lhs, rhs_reg);
      } else if (instruction->IsOr()) {
        if (use_imm)
          __ Ori(dst, lhs, rhs_imm);
        else
          __ Or(dst, lhs, rhs_reg);
      } else if (instruction->IsXor()) {
        if (use_imm)
          __ Xori(dst, lhs, rhs_imm);
        else
          __ Xor(dst, lhs, rhs_reg);
      } else if (instruction->IsAdd()) {
        if (use_imm)
          __ Addiu(dst, lhs, rhs_imm);
        else
          __ Addu(dst, lhs, rhs_reg);
      } else {
        DCHECK(instruction->IsSub());
        if (use_imm)
          // Subtraction of an immediate is an add of its negation.
          __ Addiu(dst, lhs, -rhs_imm);
        else
          __ Subu(dst, lhs, rhs_reg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Location rhs_location = locations->InAt(1);
      bool use_imm = rhs_location.IsConstant();
      if (!use_imm) {
        Register rhs_high = rhs_location.AsRegisterPairHigh<Register>();
        Register rhs_low = rhs_location.AsRegisterPairLow<Register>();
        if (instruction->IsAnd()) {
          __ And(dst_low, lhs_low, rhs_low);
          __ And(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsOr()) {
          __ Or(dst_low, lhs_low, rhs_low);
          __ Or(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsXor()) {
          __ Xor(dst_low, lhs_low, rhs_low);
          __ Xor(dst_high, lhs_high, rhs_high);
        } else if (instruction->IsAdd()) {
          // TMP receives the carry out of the low-word addition.
          if (lhs_low == rhs_low) {
            // Special case for lhs = rhs and the sum potentially overwriting both lhs and rhs.
            __ Slt(TMP, lhs_low, ZERO);
            __ Addu(dst_low, lhs_low, rhs_low);
          } else {
            __ Addu(dst_low, lhs_low, rhs_low);
            // If the sum overwrites rhs, lhs remains unchanged, otherwise rhs remains unchanged.
            __ Sltu(TMP, dst_low, (dst_low == rhs_low) ? lhs_low : rhs_low);
          }
          __ Addu(dst_high, lhs_high, rhs_high);
          __ Addu(dst_high, dst_high, TMP);
        } else {
          DCHECK(instruction->IsSub());
          // TMP receives the borrow out of the low-word subtraction.
          __ Sltu(TMP, lhs_low, rhs_low);
          __ Subu(dst_low, lhs_low, rhs_low);
          __ Subu(dst_high, lhs_high, rhs_high);
          __ Subu(dst_high, dst_high, TMP);
        }
      } else {
        int64_t value = CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()->AsConstant());
        if (instruction->IsOr()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            // OR with 0 into the same register is a no-op; skip it.
            if (dst_low != lhs_low || low != 0) {
              __ Ori(dst_low, lhs_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Or(dst_low, lhs_low, TMP);
          }
          if (IsUint<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Ori(dst_high, lhs_high, high);
            }
          } else {
            // When high == low, TMP still holds the value loaded for the
            // low half (both halves fail IsUint<16> together).
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ Or(dst_high, lhs_high, TMP);
          }
        } else if (instruction->IsXor()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            // XOR with 0 into the same register is a no-op; skip it.
            if (dst_low != lhs_low || low != 0) {
              __ Xori(dst_low, lhs_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Xor(dst_low, lhs_low, TMP);
          }
          if (IsUint<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Xori(dst_high, lhs_high, high);
            }
          } else {
            // TMP reuse as in the OR case above.
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ Xor(dst_high, lhs_high, TMP);
          }
        } else if (instruction->IsAnd()) {
          uint32_t low = Low32Bits(value);
          uint32_t high = High32Bits(value);
          if (IsUint<16>(low)) {
            __ Andi(dst_low, lhs_low, low);
          } else if (low != 0xFFFFFFFF) {
            __ LoadConst32(TMP, low);
            __ And(dst_low, lhs_low, TMP);
          } else if (dst_low != lhs_low) {
            // AND with all-ones is a plain move.
            __ Move(dst_low, lhs_low);
          }
          if (IsUint<16>(high)) {
            __ Andi(dst_high, lhs_high, high);
          } else if (high != 0xFFFFFFFF) {
            // TMP reuse as in the OR case above.
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ And(dst_high, lhs_high, TMP);
          } else if (dst_high != lhs_high) {
            __ Move(dst_high, lhs_high);
          }
        } else {
          // Add/sub of a constant: fold sub into add of the negation, then
          // compute the low-word carry in AT.
          if (instruction->IsSub()) {
            value = -value;
          } else {
            DCHECK(instruction->IsAdd());
          }
          int32_t low = Low32Bits(value);
          int32_t high = High32Bits(value);
          if (IsInt<16>(low)) {
            if (dst_low != lhs_low || low != 0) {
              __ Addiu(dst_low, lhs_low, low);
            }
            if (low != 0) {
              // AT = carry: unsigned dst_low < low iff the addition wrapped.
              __ Sltiu(AT, dst_low, low);
            }
          } else {
            __ LoadConst32(TMP, low);
            __ Addu(dst_low, lhs_low, TMP);
            __ Sltu(AT, dst_low, TMP);
          }
          if (IsInt<16>(high)) {
            if (dst_high != lhs_high || high != 0) {
              __ Addiu(dst_high, lhs_high, high);
            }
          } else {
            // TMP reuse as in the OR case above.
            if (high != low) {
              __ LoadConst32(TMP, high);
            }
            __ Addu(dst_high, lhs_high, TMP);
          }
          // Propagate the carry (AT) into the high word; when low == 0 no
          // carry is possible and AT was never set.
          if (low != 0) {
            __ Addu(dst_high, dst_high, AT);
          }
        }
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      if (instruction->IsAdd()) {
        if (type == Primitive::kPrimFloat) {
          __ AddS(dst, lhs, rhs);
        } else {
          __ AddD(dst, lhs, rhs);
        }
      } else {
        DCHECK(instruction->IsSub());
        if (type == Primitive::kPrimFloat) {
          __ SubS(dst, lhs, rhs);
        } else {
          __ SubD(dst, lhs, rhs);
        }
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected binary operation type " << type;
  }
}
2278
2279void LocationsBuilderMIPS::HandleShift(HBinaryOperation* instr) {
Alexey Frunze92d90602015-12-18 18:16:36 -08002280 DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002281
2282 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instr);
2283 Primitive::Type type = instr->GetResultType();
2284 switch (type) {
2285 case Primitive::kPrimInt:
Alexey Frunze5c7aed32015-11-25 19:41:54 -08002286 locations->SetInAt(0, Location::RequiresRegister());
2287 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2288 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2289 break;
2290 case Primitive::kPrimLong:
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002291 locations->SetInAt(0, Location::RequiresRegister());
2292 locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
2293 locations->SetOut(Location::RequiresRegister());
2294 break;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002295 default:
2296 LOG(FATAL) << "Unexpected shift type " << type;
2297 }
2298}
2299
// Number of bits in one MIPS machine word (32).
static constexpr size_t kMipsBitsPerWord = kMipsWordSize * kBitsPerByte;
2301
// Emits code for 32-/64-bit shifts (shl, shr, ushr) and rotates (ror).
// 64-bit values live in register pairs; variable-distance 64-bit shifts
// compute a tentative result for distances < 32 and then test bit 5 of the
// shift count (Andi with kMipsBitsPerWord) to decide whether the halves must
// be swapped/sign-filled/zeroed. INS/ROTR forms are used when the ISA
// revision provides them.
void InstructionCodeGeneratorMIPS::HandleShift(HBinaryOperation* instr) {
  DCHECK(instr->IsShl() || instr->IsShr() || instr->IsUShr() || instr->IsRor());
  LocationSummary* locations = instr->GetLocations();
  Primitive::Type type = instr->GetType();

  Location rhs_location = locations->InAt(1);
  bool use_imm = rhs_location.IsConstant();
  Register rhs_reg = use_imm ? ZERO : rhs_location.AsRegister<Register>();
  int64_t rhs_imm = use_imm ? CodeGenerator::GetInt64ValueOf(rhs_location.GetConstant()) : 0;
  // Shift distances are taken modulo the type width (31 or 63).
  const uint32_t shift_mask =
      (type == Primitive::kPrimInt) ? kMaxIntShiftDistance : kMaxLongShiftDistance;
  const uint32_t shift_value = rhs_imm & shift_mask;
  // Are the INS (Insert Bit Field) and ROTR instructions supported?
  bool has_ins_rotr = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      if (use_imm) {
        if (shift_value == 0) {
          // Shift by zero: just copy when dst and lhs differ.
          if (dst != lhs) {
            __ Move(dst, lhs);
          }
        } else if (instr->IsShl()) {
          __ Sll(dst, lhs, shift_value);
        } else if (instr->IsShr()) {
          __ Sra(dst, lhs, shift_value);
        } else if (instr->IsUShr()) {
          __ Srl(dst, lhs, shift_value);
        } else {
          // Rotate: single ROTR on R2+, otherwise synthesize from two
          // shifts and an OR.
          if (has_ins_rotr) {
            __ Rotr(dst, lhs, shift_value);
          } else {
            __ Sll(TMP, lhs, (kMipsBitsPerWord - shift_value) & shift_mask);
            __ Srl(dst, lhs, shift_value);
            __ Or(dst, dst, TMP);
          }
        }
      } else {
        if (instr->IsShl()) {
          __ Sllv(dst, lhs, rhs_reg);
        } else if (instr->IsShr()) {
          __ Srav(dst, lhs, rhs_reg);
        } else if (instr->IsUShr()) {
          __ Srlv(dst, lhs, rhs_reg);
        } else {
          if (has_ins_rotr) {
            __ Rotrv(dst, lhs, rhs_reg);
          } else {
            __ Subu(TMP, ZERO, rhs_reg);
            // 32-bit shift instructions use the 5 least significant bits of the shift count, so
            // shifting by `-rhs_reg` is equivalent to shifting by `(32 - rhs_reg) & 31`. The case
            // when `rhs_reg & 31 == 0` is OK even though we don't shift `lhs` left all the way out
            // by 32, because the result in this case is computed as `(lhs >> 0) | (lhs << 0)`,
            // IOW, the OR'd values are equal.
            __ Sllv(TMP, lhs, TMP);
            __ Srlv(dst, lhs, rhs_reg);
            __ Or(dst, dst, TMP);
          }
        }
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      if (use_imm) {
        if (shift_value == 0) {
          // Shift by zero: pair-to-pair move handles any register overlap.
          codegen_->MoveLocation(locations->Out(), locations->InAt(0), type);
        } else if (shift_value < kMipsBitsPerWord) {
          // Distance in [1, 31]: bits cross between the halves.
          if (has_ins_rotr) {
            if (instr->IsShl()) {
              __ Srl(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
              __ Ins(dst_high, lhs_high, shift_value, kMipsBitsPerWord - shift_value);
              __ Sll(dst_low, lhs_low, shift_value);
            } else if (instr->IsShr()) {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Sra(dst_high, lhs_high, shift_value);
            } else if (instr->IsUShr()) {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Srl(dst_high, lhs_high, shift_value);
            } else {
              __ Srl(dst_low, lhs_low, shift_value);
              __ Ins(dst_low, lhs_high, kMipsBitsPerWord - shift_value, shift_value);
              __ Srl(dst_high, lhs_high, shift_value);
              __ Ins(dst_high, lhs_low, kMipsBitsPerWord - shift_value, shift_value);
            }
          } else {
            // Pre-R2: combine the crossing bits with shift + OR via TMP.
            if (instr->IsShl()) {
              __ Sll(dst_low, lhs_low, shift_value);
              __ Srl(TMP, lhs_low, kMipsBitsPerWord - shift_value);
              __ Sll(dst_high, lhs_high, shift_value);
              __ Or(dst_high, dst_high, TMP);
            } else if (instr->IsShr()) {
              __ Sra(dst_high, lhs_high, shift_value);
              __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
              __ Srl(dst_low, lhs_low, shift_value);
              __ Or(dst_low, dst_low, TMP);
            } else if (instr->IsUShr()) {
              __ Srl(dst_high, lhs_high, shift_value);
              __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value);
              __ Srl(dst_low, lhs_low, shift_value);
              __ Or(dst_low, dst_low, TMP);
            } else {
              __ Srl(TMP, lhs_low, shift_value);
              __ Sll(dst_low, lhs_high, kMipsBitsPerWord - shift_value);
              __ Or(dst_low, dst_low, TMP);
              __ Srl(TMP, lhs_high, shift_value);
              __ Sll(dst_high, lhs_low, kMipsBitsPerWord - shift_value);
              __ Or(dst_high, dst_high, TMP);
            }
          }
        } else {
          // Distance in [32, 63]: one whole half shifts out entirely.
          const uint32_t shift_value_high = shift_value - kMipsBitsPerWord;
          if (instr->IsShl()) {
            __ Sll(dst_high, lhs_low, shift_value_high);
            __ Move(dst_low, ZERO);
          } else if (instr->IsShr()) {
            __ Sra(dst_low, lhs_high, shift_value_high);
            // Fill the high half with the sign bit.
            __ Sra(dst_high, dst_low, kMipsBitsPerWord - 1);
          } else if (instr->IsUShr()) {
            __ Srl(dst_low, lhs_high, shift_value_high);
            __ Move(dst_high, ZERO);
          } else {
            if (shift_value == kMipsBitsPerWord) {
              // 64-bit rotation by 32 is just a swap.
              __ Move(dst_low, lhs_high);
              __ Move(dst_high, lhs_low);
            } else {
              if (has_ins_rotr) {
                __ Srl(dst_low, lhs_high, shift_value_high);
                __ Ins(dst_low, lhs_low, kMipsBitsPerWord - shift_value_high, shift_value_high);
                __ Srl(dst_high, lhs_low, shift_value_high);
                __ Ins(dst_high, lhs_high, kMipsBitsPerWord - shift_value_high, shift_value_high);
              } else {
                __ Sll(TMP, lhs_low, kMipsBitsPerWord - shift_value_high);
                __ Srl(dst_low, lhs_high, shift_value_high);
                __ Or(dst_low, dst_low, TMP);
                __ Sll(TMP, lhs_high, kMipsBitsPerWord - shift_value_high);
                __ Srl(dst_high, lhs_low, shift_value_high);
                __ Or(dst_high, dst_high, TMP);
              }
            }
          }
        }
      } else {
        // Variable distance: compute the distance-mod-32 result, then fix
        // up the halves if bit 5 of the count (i.e. distance >= 32) is set.
        MipsLabel done;
        if (instr->IsShl()) {
          __ Sllv(dst_low, lhs_low, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Srl(TMP, lhs_low, 1);
          __ Srlv(TMP, TMP, AT);
          __ Sllv(dst_high, lhs_high, rhs_reg);
          __ Or(dst_high, dst_high, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_high, dst_low);
          __ Move(dst_low, ZERO);
        } else if (instr->IsShr()) {
          __ Srav(dst_high, lhs_high, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Sll(TMP, lhs_high, 1);
          __ Sllv(TMP, TMP, AT);
          __ Srlv(dst_low, lhs_low, rhs_reg);
          __ Or(dst_low, dst_low, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_low, dst_high);
          __ Sra(dst_high, dst_high, 31);
        } else if (instr->IsUShr()) {
          __ Srlv(dst_high, lhs_high, rhs_reg);
          __ Nor(AT, ZERO, rhs_reg);
          __ Sll(TMP, lhs_high, 1);
          __ Sllv(TMP, TMP, AT);
          __ Srlv(dst_low, lhs_low, rhs_reg);
          __ Or(dst_low, dst_low, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          __ Move(dst_low, dst_high);
          __ Move(dst_high, ZERO);
        } else {
          __ Nor(AT, ZERO, rhs_reg);
          __ Srlv(TMP, lhs_low, rhs_reg);
          __ Sll(dst_low, lhs_high, 1);
          __ Sllv(dst_low, dst_low, AT);
          __ Or(dst_low, dst_low, TMP);
          __ Srlv(TMP, lhs_high, rhs_reg);
          __ Sll(dst_high, lhs_low, 1);
          __ Sllv(dst_high, dst_high, AT);
          __ Or(dst_high, dst_high, TMP);
          __ Andi(TMP, rhs_reg, kMipsBitsPerWord);
          __ Beqz(TMP, &done);
          // Rotation by >= 32: swap the halves through TMP.
          __ Move(TMP, dst_high);
          __ Move(dst_high, dst_low);
          __ Move(dst_low, TMP);
        }
        __ Bind(&done);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected shift operation type " << type;
  }
}
2513
// Addition shares operand constraints with the other binary operations.
void LocationsBuilderMIPS::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2517
// Addition shares its code-emission path with the other binary operations.
void InstructionCodeGeneratorMIPS::VisitAdd(HAdd* instruction) {
  HandleBinaryOp(instruction);
}
2521
// Bitwise AND shares operand constraints with the other binary operations.
void LocationsBuilderMIPS::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2525
// Bitwise AND shares its code-emission path with the other binary operations.
void InstructionCodeGeneratorMIPS::VisitAnd(HAnd* instruction) {
  HandleBinaryOp(instruction);
}
2529
// Sets up locations for an array element load. Object-array loads under a
// read barrier go through a slow path and may need an extra temp register,
// depending on whether the Baker read-barrier thunks are in use.
void LocationsBuilderMIPS::VisitArrayGet(HArrayGet* instruction) {
  Primitive::Type type = instruction->GetType();
  bool object_array_get_with_read_barrier =
      kEmitCompilerReadBarrier && (type == Primitive::kPrimNot);
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction,
                                                   object_array_get_with_read_barrier
                                                       ? LocationSummary::kCallOnSlowPath
                                                       : LocationSummary::kNoCall);
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  if (Primitive::IsFloatingPointType(type)) {
    locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
  } else {
    // The output overlaps in the case of an object array get with
    // read barriers enabled: we do not want the move to overwrite the
    // array's location, as we need it to emit the read barrier.
    locations->SetOut(Location::RequiresRegister(),
                      object_array_get_with_read_barrier
                          ? Location::kOutputOverlap
                          : Location::kNoOutputOverlap);
  }
  // We need a temporary register for the read barrier marking slow
  // path in CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier.
  if (object_array_get_with_read_barrier && kUseBakerReadBarrier) {
    // Constant-index loads follow the field-thunk path; variable-index
    // loads follow the array-thunk path. The temp is only needed when the
    // corresponding thunk flavor is disabled.
    bool temp_needed = instruction->GetIndex()->IsConstant()
        ? !kBakerReadBarrierThunksEnableForFields
        : !kBakerReadBarrierThunksEnableForArrays;
    if (temp_needed) {
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}
2566
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002567static auto GetImplicitNullChecker(HInstruction* instruction, CodeGeneratorMIPS* codegen) {
2568 auto null_checker = [codegen, instruction]() {
2569 codegen->MaybeRecordImplicitNullCheck(instruction);
Alexey Frunze2923db72016-08-20 01:55:47 -07002570 };
2571 return null_checker;
2572}
2573
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002574void InstructionCodeGeneratorMIPS::VisitArrayGet(HArrayGet* instruction) {
2575 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08002576 Location obj_loc = locations->InAt(0);
2577 Register obj = obj_loc.AsRegister<Register>();
2578 Location out_loc = locations->Out();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002579 Location index = locations->InAt(1);
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002580 uint32_t data_offset = CodeGenerator::GetArrayDataOffset(instruction);
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002581 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002582
Vladimir Marko87f3fcb2016-04-28 15:52:11 +01002583 Primitive::Type type = instruction->GetType();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002584 const bool maybe_compressed_char_at = mirror::kUseStringCompression &&
2585 instruction->IsStringCharAt();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002586 switch (type) {
2587 case Primitive::kPrimBoolean: {
Alexey Frunze15958152017-02-09 19:08:30 -08002588 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002589 if (index.IsConstant()) {
2590 size_t offset =
2591 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002592 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002593 } else {
2594 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002595 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002596 }
2597 break;
2598 }
2599
2600 case Primitive::kPrimByte: {
Alexey Frunze15958152017-02-09 19:08:30 -08002601 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002602 if (index.IsConstant()) {
2603 size_t offset =
2604 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002605 __ LoadFromOffset(kLoadSignedByte, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002606 } else {
2607 __ Addu(TMP, obj, index.AsRegister<Register>());
Alexey Frunze2923db72016-08-20 01:55:47 -07002608 __ LoadFromOffset(kLoadSignedByte, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002609 }
2610 break;
2611 }
2612
2613 case Primitive::kPrimShort: {
Alexey Frunze15958152017-02-09 19:08:30 -08002614 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002615 if (index.IsConstant()) {
2616 size_t offset =
2617 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002618 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002619 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002620 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_2, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002621 __ LoadFromOffset(kLoadSignedHalfword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002622 }
2623 break;
2624 }
2625
2626 case Primitive::kPrimChar: {
Alexey Frunze15958152017-02-09 19:08:30 -08002627 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002628 if (maybe_compressed_char_at) {
2629 uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
2630 __ LoadFromOffset(kLoadWord, TMP, obj, count_offset, null_checker);
2631 __ Sll(TMP, TMP, 31); // Extract compression flag into the most significant bit of TMP.
2632 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
2633 "Expecting 0=compressed, 1=uncompressed");
2634 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002635 if (index.IsConstant()) {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002636 int32_t const_index = index.GetConstant()->AsIntConstant()->GetValue();
2637 if (maybe_compressed_char_at) {
2638 MipsLabel uncompressed_load, done;
2639 __ Bnez(TMP, &uncompressed_load);
2640 __ LoadFromOffset(kLoadUnsignedByte,
2641 out,
2642 obj,
2643 data_offset + (const_index << TIMES_1));
2644 __ B(&done);
2645 __ Bind(&uncompressed_load);
2646 __ LoadFromOffset(kLoadUnsignedHalfword,
2647 out,
2648 obj,
2649 data_offset + (const_index << TIMES_2));
2650 __ Bind(&done);
2651 } else {
2652 __ LoadFromOffset(kLoadUnsignedHalfword,
2653 out,
2654 obj,
2655 data_offset + (const_index << TIMES_2),
2656 null_checker);
2657 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002658 } else {
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002659 Register index_reg = index.AsRegister<Register>();
2660 if (maybe_compressed_char_at) {
2661 MipsLabel uncompressed_load, done;
2662 __ Bnez(TMP, &uncompressed_load);
2663 __ Addu(TMP, obj, index_reg);
2664 __ LoadFromOffset(kLoadUnsignedByte, out, TMP, data_offset);
2665 __ B(&done);
2666 __ Bind(&uncompressed_load);
Chris Larsencd0295d2017-03-31 15:26:54 -07002667 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002668 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset);
2669 __ Bind(&done);
2670 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002671 __ ShiftAndAdd(TMP, index_reg, obj, TIMES_2, TMP);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002672 __ LoadFromOffset(kLoadUnsignedHalfword, out, TMP, data_offset, null_checker);
2673 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002674 }
2675 break;
2676 }
2677
Alexey Frunze15958152017-02-09 19:08:30 -08002678 case Primitive::kPrimInt: {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002679 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
Alexey Frunze15958152017-02-09 19:08:30 -08002680 Register out = out_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002681 if (index.IsConstant()) {
2682 size_t offset =
2683 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002684 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002685 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002686 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002687 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002688 }
2689 break;
2690 }
2691
Alexey Frunze15958152017-02-09 19:08:30 -08002692 case Primitive::kPrimNot: {
2693 static_assert(
2694 sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
2695 "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
2696 // /* HeapReference<Object> */ out =
2697 // *(obj + data_offset + index * sizeof(HeapReference<Object>))
2698 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002699 bool temp_needed = index.IsConstant()
2700 ? !kBakerReadBarrierThunksEnableForFields
2701 : !kBakerReadBarrierThunksEnableForArrays;
2702 Location temp = temp_needed ? locations->GetTemp(0) : Location::NoLocation();
Alexey Frunze15958152017-02-09 19:08:30 -08002703 // Note that a potential implicit null check is handled in this
2704 // CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier call.
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002705 DCHECK(!instruction->CanDoImplicitNullCheckOn(instruction->InputAt(0)));
2706 if (index.IsConstant()) {
2707 // Array load with a constant index can be treated as a field load.
2708 size_t offset =
2709 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2710 codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
2711 out_loc,
2712 obj,
2713 offset,
2714 temp,
2715 /* needs_null_check */ false);
2716 } else {
2717 codegen_->GenerateArrayLoadWithBakerReadBarrier(instruction,
2718 out_loc,
2719 obj,
2720 data_offset,
2721 index,
2722 temp,
2723 /* needs_null_check */ false);
2724 }
Alexey Frunze15958152017-02-09 19:08:30 -08002725 } else {
2726 Register out = out_loc.AsRegister<Register>();
2727 if (index.IsConstant()) {
2728 size_t offset =
2729 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2730 __ LoadFromOffset(kLoadWord, out, obj, offset, null_checker);
2731 // If read barriers are enabled, emit read barriers other than
2732 // Baker's using a slow path (and also unpoison the loaded
2733 // reference, if heap poisoning is enabled).
2734 codegen_->MaybeGenerateReadBarrierSlow(instruction, out_loc, out_loc, obj_loc, offset);
2735 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002736 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze15958152017-02-09 19:08:30 -08002737 __ LoadFromOffset(kLoadWord, out, TMP, data_offset, null_checker);
2738 // If read barriers are enabled, emit read barriers other than
2739 // Baker's using a slow path (and also unpoison the loaded
2740 // reference, if heap poisoning is enabled).
2741 codegen_->MaybeGenerateReadBarrierSlow(instruction,
2742 out_loc,
2743 out_loc,
2744 obj_loc,
2745 data_offset,
2746 index);
2747 }
2748 }
2749 break;
2750 }
2751
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002752 case Primitive::kPrimLong: {
Alexey Frunze15958152017-02-09 19:08:30 -08002753 Register out = out_loc.AsRegisterPairLow<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002754 if (index.IsConstant()) {
2755 size_t offset =
2756 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002757 __ LoadFromOffset(kLoadDoubleword, out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002758 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002759 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002760 __ LoadFromOffset(kLoadDoubleword, out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002761 }
2762 break;
2763 }
2764
2765 case Primitive::kPrimFloat: {
Alexey Frunze15958152017-02-09 19:08:30 -08002766 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002767 if (index.IsConstant()) {
2768 size_t offset =
2769 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002770 __ LoadSFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002771 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002772 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_4, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002773 __ LoadSFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002774 }
2775 break;
2776 }
2777
2778 case Primitive::kPrimDouble: {
Alexey Frunze15958152017-02-09 19:08:30 -08002779 FRegister out = out_loc.AsFpuRegister<FRegister>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002780 if (index.IsConstant()) {
2781 size_t offset =
2782 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
Alexey Frunze2923db72016-08-20 01:55:47 -07002783 __ LoadDFromOffset(out, obj, offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002784 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002785 __ ShiftAndAdd(TMP, index.AsRegister<Register>(), obj, TIMES_8, TMP);
Alexey Frunze2923db72016-08-20 01:55:47 -07002786 __ LoadDFromOffset(out, TMP, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002787 }
2788 break;
2789 }
2790
2791 case Primitive::kPrimVoid:
2792 LOG(FATAL) << "Unreachable type " << instruction->GetType();
2793 UNREACHABLE();
2794 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002795}
2796
2797void LocationsBuilderMIPS::VisitArrayLength(HArrayLength* instruction) {
2798 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2799 locations->SetInAt(0, Location::RequiresRegister());
2800 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2801}
2802
2803void InstructionCodeGeneratorMIPS::VisitArrayLength(HArrayLength* instruction) {
2804 LocationSummary* locations = instruction->GetLocations();
Vladimir Markodce016e2016-04-28 13:10:02 +01002805 uint32_t offset = CodeGenerator::GetArrayLengthOffset(instruction);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002806 Register obj = locations->InAt(0).AsRegister<Register>();
2807 Register out = locations->Out().AsRegister<Register>();
2808 __ LoadFromOffset(kLoadWord, out, obj, offset);
2809 codegen_->MaybeRecordImplicitNullCheck(instruction);
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002810 // Mask out compression flag from String's array length.
2811 if (mirror::kUseStringCompression && instruction->IsStringLength()) {
2812 __ Srl(out, out, 1u);
2813 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002814}
2815
Alexey Frunzef58b2482016-09-02 22:14:06 -07002816Location LocationsBuilderMIPS::RegisterOrZeroConstant(HInstruction* instruction) {
2817 return (instruction->IsConstant() && instruction->AsConstant()->IsZeroBitPattern())
2818 ? Location::ConstantLocation(instruction->AsConstant())
2819 : Location::RequiresRegister();
2820}
2821
2822Location LocationsBuilderMIPS::FpuRegisterOrConstantForStore(HInstruction* instruction) {
2823 // We can store 0.0 directly (from the ZERO register) without loading it into an FPU register.
2824 // We can store a non-zero float or double constant without first loading it into the FPU,
2825 // but we should only prefer this if the constant has a single use.
2826 if (instruction->IsConstant() &&
2827 (instruction->AsConstant()->IsZeroBitPattern() ||
2828 instruction->GetUses().HasExactlyOneElement())) {
2829 return Location::ConstantLocation(instruction->AsConstant());
2830 // Otherwise fall through and require an FPU register for the constant.
2831 }
2832 return Location::RequiresFpuRegister();
2833}
2834
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002835void LocationsBuilderMIPS::VisitArraySet(HArraySet* instruction) {
Alexey Frunze15958152017-02-09 19:08:30 -08002836 Primitive::Type value_type = instruction->GetComponentType();
2837
2838 bool needs_write_barrier =
2839 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2840 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
2841
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002842 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2843 instruction,
Alexey Frunze15958152017-02-09 19:08:30 -08002844 may_need_runtime_call_for_type_check ?
2845 LocationSummary::kCallOnSlowPath :
2846 LocationSummary::kNoCall);
2847
2848 locations->SetInAt(0, Location::RequiresRegister());
2849 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2850 if (Primitive::IsFloatingPointType(instruction->InputAt(2)->GetType())) {
2851 locations->SetInAt(2, FpuRegisterOrConstantForStore(instruction->InputAt(2)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002852 } else {
Alexey Frunze15958152017-02-09 19:08:30 -08002853 locations->SetInAt(2, RegisterOrZeroConstant(instruction->InputAt(2)));
2854 }
2855 if (needs_write_barrier) {
2856 // Temporary register for the write barrier.
2857 locations->AddTemp(Location::RequiresRegister()); // Possibly used for ref. poisoning too.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002858 }
2859}
2860
2861void InstructionCodeGeneratorMIPS::VisitArraySet(HArraySet* instruction) {
2862 LocationSummary* locations = instruction->GetLocations();
2863 Register obj = locations->InAt(0).AsRegister<Register>();
2864 Location index = locations->InAt(1);
Alexey Frunzef58b2482016-09-02 22:14:06 -07002865 Location value_location = locations->InAt(2);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002866 Primitive::Type value_type = instruction->GetComponentType();
Alexey Frunze15958152017-02-09 19:08:30 -08002867 bool may_need_runtime_call_for_type_check = instruction->NeedsTypeCheck();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002868 bool needs_write_barrier =
2869 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
Tijana Jakovljevic57433862017-01-17 16:59:03 +01002870 auto null_checker = GetImplicitNullChecker(instruction, codegen_);
Alexey Frunzef58b2482016-09-02 22:14:06 -07002871 Register base_reg = index.IsConstant() ? obj : TMP;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002872
2873 switch (value_type) {
2874 case Primitive::kPrimBoolean:
2875 case Primitive::kPrimByte: {
2876 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002877 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002878 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002879 } else {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002880 __ Addu(base_reg, obj, index.AsRegister<Register>());
2881 }
2882 if (value_location.IsConstant()) {
2883 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2884 __ StoreConstToOffset(kStoreByte, value, base_reg, data_offset, TMP, null_checker);
2885 } else {
2886 Register value = value_location.AsRegister<Register>();
2887 __ StoreToOffset(kStoreByte, value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002888 }
2889 break;
2890 }
2891
2892 case Primitive::kPrimShort:
2893 case Primitive::kPrimChar: {
2894 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002895 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002896 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002897 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002898 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_2, base_reg);
Alexey Frunzef58b2482016-09-02 22:14:06 -07002899 }
2900 if (value_location.IsConstant()) {
2901 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2902 __ StoreConstToOffset(kStoreHalfword, value, base_reg, data_offset, TMP, null_checker);
2903 } else {
2904 Register value = value_location.AsRegister<Register>();
2905 __ StoreToOffset(kStoreHalfword, value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002906 }
2907 break;
2908 }
2909
Alexey Frunze15958152017-02-09 19:08:30 -08002910 case Primitive::kPrimInt: {
2911 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2912 if (index.IsConstant()) {
2913 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
2914 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002915 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Alexey Frunze15958152017-02-09 19:08:30 -08002916 }
2917 if (value_location.IsConstant()) {
2918 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2919 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2920 } else {
2921 Register value = value_location.AsRegister<Register>();
2922 __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
2923 }
2924 break;
2925 }
2926
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002927 case Primitive::kPrimNot: {
Alexey Frunze15958152017-02-09 19:08:30 -08002928 if (value_location.IsConstant()) {
2929 // Just setting null.
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002930 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002931 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07002932 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002933 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002934 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002935 }
Alexey Frunze15958152017-02-09 19:08:30 -08002936 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
2937 DCHECK_EQ(value, 0);
2938 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
2939 DCHECK(!needs_write_barrier);
2940 DCHECK(!may_need_runtime_call_for_type_check);
2941 break;
2942 }
2943
2944 DCHECK(needs_write_barrier);
2945 Register value = value_location.AsRegister<Register>();
2946 Register temp1 = locations->GetTemp(0).AsRegister<Register>();
2947 Register temp2 = TMP; // Doesn't need to survive slow path.
2948 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2949 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2950 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2951 MipsLabel done;
2952 SlowPathCodeMIPS* slow_path = nullptr;
2953
2954 if (may_need_runtime_call_for_type_check) {
2955 slow_path = new (GetGraph()->GetArena()) ArraySetSlowPathMIPS(instruction);
2956 codegen_->AddSlowPath(slow_path);
2957 if (instruction->GetValueCanBeNull()) {
2958 MipsLabel non_zero;
2959 __ Bnez(value, &non_zero);
2960 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2961 if (index.IsConstant()) {
2962 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Alexey Frunzec061de12017-02-14 13:27:23 -08002963 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07002964 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Alexey Frunzec061de12017-02-14 13:27:23 -08002965 }
Alexey Frunze15958152017-02-09 19:08:30 -08002966 __ StoreToOffset(kStoreWord, value, base_reg, data_offset, null_checker);
2967 __ B(&done);
2968 __ Bind(&non_zero);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02002969 }
Alexey Frunze15958152017-02-09 19:08:30 -08002970
2971 // Note that when read barriers are enabled, the type checks
2972 // are performed without read barriers. This is fine, even in
2973 // the case where a class object is in the from-space after
2974 // the flip, as a comparison involving such a type would not
2975 // produce a false positive; it may of course produce a false
2976 // negative, in which case we would take the ArraySet slow
2977 // path.
2978
2979 // /* HeapReference<Class> */ temp1 = obj->klass_
2980 __ LoadFromOffset(kLoadWord, temp1, obj, class_offset, null_checker);
2981 __ MaybeUnpoisonHeapReference(temp1);
2982
2983 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2984 __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
2985 // /* HeapReference<Class> */ temp2 = value->klass_
2986 __ LoadFromOffset(kLoadWord, temp2, value, class_offset);
2987 // If heap poisoning is enabled, no need to unpoison `temp1`
2988 // nor `temp2`, as we are comparing two poisoned references.
2989
2990 if (instruction->StaticTypeOfArrayIsObjectArray()) {
2991 MipsLabel do_put;
2992 __ Beq(temp1, temp2, &do_put);
2993 // If heap poisoning is enabled, the `temp1` reference has
2994 // not been unpoisoned yet; unpoison it now.
2995 __ MaybeUnpoisonHeapReference(temp1);
2996
2997 // /* HeapReference<Class> */ temp1 = temp1->super_class_
2998 __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
2999 // If heap poisoning is enabled, no need to unpoison
3000 // `temp1`, as we are comparing against null below.
3001 __ Bnez(temp1, slow_path->GetEntryLabel());
3002 __ Bind(&do_put);
3003 } else {
3004 __ Bne(temp1, temp2, slow_path->GetEntryLabel());
3005 }
3006 }
3007
3008 Register source = value;
3009 if (kPoisonHeapReferences) {
3010 // Note that in the case where `value` is a null reference,
3011 // we do not enter this block, as a null reference does not
3012 // need poisoning.
3013 __ Move(temp1, value);
3014 __ PoisonHeapReference(temp1);
3015 source = temp1;
3016 }
3017
3018 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
3019 if (index.IsConstant()) {
3020 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003021 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07003022 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Alexey Frunze15958152017-02-09 19:08:30 -08003023 }
3024 __ StoreToOffset(kStoreWord, source, base_reg, data_offset);
3025
3026 if (!may_need_runtime_call_for_type_check) {
3027 codegen_->MaybeRecordImplicitNullCheck(instruction);
3028 }
3029
3030 codegen_->MarkGCCard(obj, value, instruction->GetValueCanBeNull());
3031
3032 if (done.IsLinked()) {
3033 __ Bind(&done);
3034 }
3035
3036 if (slow_path != nullptr) {
3037 __ Bind(slow_path->GetExitLabel());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003038 }
3039 break;
3040 }
3041
3042 case Primitive::kPrimLong: {
3043 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003044 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07003045 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003046 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07003047 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
Alexey Frunzef58b2482016-09-02 22:14:06 -07003048 }
3049 if (value_location.IsConstant()) {
3050 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
3051 __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
3052 } else {
3053 Register value = value_location.AsRegisterPairLow<Register>();
3054 __ StoreToOffset(kStoreDoubleword, value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003055 }
3056 break;
3057 }
3058
3059 case Primitive::kPrimFloat: {
3060 uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003061 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07003062 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003063 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07003064 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_4, base_reg);
Alexey Frunzef58b2482016-09-02 22:14:06 -07003065 }
3066 if (value_location.IsConstant()) {
3067 int32_t value = CodeGenerator::GetInt32ValueOf(value_location.GetConstant());
3068 __ StoreConstToOffset(kStoreWord, value, base_reg, data_offset, TMP, null_checker);
3069 } else {
3070 FRegister value = value_location.AsFpuRegister<FRegister>();
3071 __ StoreSToOffset(value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003072 }
3073 break;
3074 }
3075
3076 case Primitive::kPrimDouble: {
3077 uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003078 if (index.IsConstant()) {
Alexey Frunzef58b2482016-09-02 22:14:06 -07003079 data_offset += index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003080 } else {
Chris Larsencd0295d2017-03-31 15:26:54 -07003081 __ ShiftAndAdd(base_reg, index.AsRegister<Register>(), obj, TIMES_8, base_reg);
Alexey Frunzef58b2482016-09-02 22:14:06 -07003082 }
3083 if (value_location.IsConstant()) {
3084 int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
3085 __ StoreConstToOffset(kStoreDoubleword, value, base_reg, data_offset, TMP, null_checker);
3086 } else {
3087 FRegister value = value_location.AsFpuRegister<FRegister>();
3088 __ StoreDToOffset(value, base_reg, data_offset, null_checker);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003089 }
3090 break;
3091 }
3092
3093 case Primitive::kPrimVoid:
3094 LOG(FATAL) << "Unreachable type " << instruction->GetType();
3095 UNREACHABLE();
3096 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003097}
3098
3099void LocationsBuilderMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01003100 RegisterSet caller_saves = RegisterSet::Empty();
3101 InvokeRuntimeCallingConvention calling_convention;
3102 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3103 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
3104 LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction, caller_saves);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003105 locations->SetInAt(0, Location::RequiresRegister());
3106 locations->SetInAt(1, Location::RequiresRegister());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003107}
3108
3109void InstructionCodeGeneratorMIPS::VisitBoundsCheck(HBoundsCheck* instruction) {
3110 LocationSummary* locations = instruction->GetLocations();
3111 BoundsCheckSlowPathMIPS* slow_path =
3112 new (GetGraph()->GetArena()) BoundsCheckSlowPathMIPS(instruction);
3113 codegen_->AddSlowPath(slow_path);
3114
3115 Register index = locations->InAt(0).AsRegister<Register>();
3116 Register length = locations->InAt(1).AsRegister<Register>();
3117
3118 // length is limited by the maximum positive signed 32-bit integer.
3119 // Unsigned comparison of length and index checks for index < 0
3120 // and for length <= index simultaneously.
3121 __ Bgeu(index, length, slow_path->GetEntryLabel());
3122}
3123
Alexey Frunze15958152017-02-09 19:08:30 -08003124// Temp is used for read barrier.
3125static size_t NumberOfInstanceOfTemps(TypeCheckKind type_check_kind) {
3126 if (kEmitCompilerReadBarrier &&
Alexey Frunze4147fcc2017-06-17 19:57:27 -07003127 !(kUseBakerReadBarrier && kBakerReadBarrierThunksEnableForFields) &&
Alexey Frunze15958152017-02-09 19:08:30 -08003128 (kUseBakerReadBarrier ||
3129 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3130 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3131 type_check_kind == TypeCheckKind::kArrayObjectCheck)) {
3132 return 1;
3133 }
3134 return 0;
3135}
3136
3137// Extra temp is used for read barrier.
3138static size_t NumberOfCheckCastTemps(TypeCheckKind type_check_kind) {
3139 return 1 + NumberOfInstanceOfTemps(type_check_kind);
3140}
3141
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003142void LocationsBuilderMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003143 LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
3144 bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
3145
3146 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
3147 switch (type_check_kind) {
3148 case TypeCheckKind::kExactCheck:
3149 case TypeCheckKind::kAbstractClassCheck:
3150 case TypeCheckKind::kClassHierarchyCheck:
3151 case TypeCheckKind::kArrayObjectCheck:
Alexey Frunze15958152017-02-09 19:08:30 -08003152 call_kind = (throws_into_catch || kEmitCompilerReadBarrier)
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003153 ? LocationSummary::kCallOnSlowPath
3154 : LocationSummary::kNoCall; // In fact, call on a fatal (non-returning) slow path.
3155 break;
3156 case TypeCheckKind::kArrayCheck:
3157 case TypeCheckKind::kUnresolvedCheck:
3158 case TypeCheckKind::kInterfaceCheck:
3159 call_kind = LocationSummary::kCallOnSlowPath;
3160 break;
3161 }
3162
3163 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003164 locations->SetInAt(0, Location::RequiresRegister());
3165 locations->SetInAt(1, Location::RequiresRegister());
Alexey Frunze15958152017-02-09 19:08:30 -08003166 locations->AddRegisterTemps(NumberOfCheckCastTemps(type_check_kind));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003167}
3168
3169void InstructionCodeGeneratorMIPS::VisitCheckCast(HCheckCast* instruction) {
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003170 TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003171 LocationSummary* locations = instruction->GetLocations();
Alexey Frunze15958152017-02-09 19:08:30 -08003172 Location obj_loc = locations->InAt(0);
3173 Register obj = obj_loc.AsRegister<Register>();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003174 Register cls = locations->InAt(1).AsRegister<Register>();
Alexey Frunze15958152017-02-09 19:08:30 -08003175 Location temp_loc = locations->GetTemp(0);
3176 Register temp = temp_loc.AsRegister<Register>();
3177 const size_t num_temps = NumberOfCheckCastTemps(type_check_kind);
3178 DCHECK_LE(num_temps, 2u);
3179 Location maybe_temp2_loc = (num_temps >= 2) ? locations->GetTemp(1) : Location::NoLocation();
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003180 const uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3181 const uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
3182 const uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
3183 const uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
3184 const uint32_t iftable_offset = mirror::Class::IfTableOffset().Uint32Value();
3185 const uint32_t array_length_offset = mirror::Array::LengthOffset().Uint32Value();
3186 const uint32_t object_array_data_offset =
3187 mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
3188 MipsLabel done;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003189
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003190 // Always false for read barriers since we may need to go to the entrypoint for non-fatal cases
3191 // from false negatives. The false negatives may come from avoiding read barriers below. Avoiding
3192 // read barriers is done for performance and code size reasons.
3193 bool is_type_check_slow_path_fatal = false;
3194 if (!kEmitCompilerReadBarrier) {
3195 is_type_check_slow_path_fatal =
3196 (type_check_kind == TypeCheckKind::kExactCheck ||
3197 type_check_kind == TypeCheckKind::kAbstractClassCheck ||
3198 type_check_kind == TypeCheckKind::kClassHierarchyCheck ||
3199 type_check_kind == TypeCheckKind::kArrayObjectCheck) &&
3200 !instruction->CanThrowIntoCatchBlock();
3201 }
3202 SlowPathCodeMIPS* slow_path =
3203 new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
3204 is_type_check_slow_path_fatal);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003205 codegen_->AddSlowPath(slow_path);
3206
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003207 // Avoid this check if we know `obj` is not null.
3208 if (instruction->MustDoNullCheck()) {
3209 __ Beqz(obj, &done);
3210 }
3211
3212 switch (type_check_kind) {
3213 case TypeCheckKind::kExactCheck:
3214 case TypeCheckKind::kArrayCheck: {
3215 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003216 GenerateReferenceLoadTwoRegisters(instruction,
3217 temp_loc,
3218 obj_loc,
3219 class_offset,
3220 maybe_temp2_loc,
3221 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003222 // Jump to slow path for throwing the exception or doing a
3223 // more involved array check.
3224 __ Bne(temp, cls, slow_path->GetEntryLabel());
3225 break;
3226 }
3227
3228 case TypeCheckKind::kAbstractClassCheck: {
3229 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003230 GenerateReferenceLoadTwoRegisters(instruction,
3231 temp_loc,
3232 obj_loc,
3233 class_offset,
3234 maybe_temp2_loc,
3235 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003236 // If the class is abstract, we eagerly fetch the super class of the
3237 // object to avoid doing a comparison we know will fail.
3238 MipsLabel loop;
3239 __ Bind(&loop);
3240 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003241 GenerateReferenceLoadOneRegister(instruction,
3242 temp_loc,
3243 super_offset,
3244 maybe_temp2_loc,
3245 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003246 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3247 // exception.
3248 __ Beqz(temp, slow_path->GetEntryLabel());
3249 // Otherwise, compare the classes.
3250 __ Bne(temp, cls, &loop);
3251 break;
3252 }
3253
3254 case TypeCheckKind::kClassHierarchyCheck: {
3255 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003256 GenerateReferenceLoadTwoRegisters(instruction,
3257 temp_loc,
3258 obj_loc,
3259 class_offset,
3260 maybe_temp2_loc,
3261 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003262 // Walk over the class hierarchy to find a match.
3263 MipsLabel loop;
3264 __ Bind(&loop);
3265 __ Beq(temp, cls, &done);
3266 // /* HeapReference<Class> */ temp = temp->super_class_
Alexey Frunze15958152017-02-09 19:08:30 -08003267 GenerateReferenceLoadOneRegister(instruction,
3268 temp_loc,
3269 super_offset,
3270 maybe_temp2_loc,
3271 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003272 // If the class reference currently in `temp` is null, jump to the slow path to throw the
3273 // exception. Otherwise, jump to the beginning of the loop.
3274 __ Bnez(temp, &loop);
3275 __ B(slow_path->GetEntryLabel());
3276 break;
3277 }
3278
3279 case TypeCheckKind::kArrayObjectCheck: {
3280 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003281 GenerateReferenceLoadTwoRegisters(instruction,
3282 temp_loc,
3283 obj_loc,
3284 class_offset,
3285 maybe_temp2_loc,
3286 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003287 // Do an exact check.
3288 __ Beq(temp, cls, &done);
3289 // Otherwise, we need to check that the object's class is a non-primitive array.
3290 // /* HeapReference<Class> */ temp = temp->component_type_
Alexey Frunze15958152017-02-09 19:08:30 -08003291 GenerateReferenceLoadOneRegister(instruction,
3292 temp_loc,
3293 component_offset,
3294 maybe_temp2_loc,
3295 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003296 // If the component type is null, jump to the slow path to throw the exception.
3297 __ Beqz(temp, slow_path->GetEntryLabel());
3298 // Otherwise, the object is indeed an array, further check that this component
3299 // type is not a primitive type.
3300 __ LoadFromOffset(kLoadUnsignedHalfword, temp, temp, primitive_offset);
3301 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
3302 __ Bnez(temp, slow_path->GetEntryLabel());
3303 break;
3304 }
3305
3306 case TypeCheckKind::kUnresolvedCheck:
3307 // We always go into the type check slow path for the unresolved check case.
3308 // We cannot directly call the CheckCast runtime entry point
3309 // without resorting to a type checking slow path here (i.e. by
3310 // calling InvokeRuntime directly), as it would require to
3311 // assign fixed registers for the inputs of this HInstanceOf
3312 // instruction (following the runtime calling convention), which
3313 // might be cluttered by the potential first read barrier
3314 // emission at the beginning of this method.
3315 __ B(slow_path->GetEntryLabel());
3316 break;
3317
3318 case TypeCheckKind::kInterfaceCheck: {
3319 // Avoid read barriers to improve performance of the fast path. We can not get false
3320 // positives by doing this.
3321 // /* HeapReference<Class> */ temp = obj->klass_
Alexey Frunze15958152017-02-09 19:08:30 -08003322 GenerateReferenceLoadTwoRegisters(instruction,
3323 temp_loc,
3324 obj_loc,
3325 class_offset,
3326 maybe_temp2_loc,
3327 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003328 // /* HeapReference<Class> */ temp = temp->iftable_
Alexey Frunze15958152017-02-09 19:08:30 -08003329 GenerateReferenceLoadTwoRegisters(instruction,
3330 temp_loc,
3331 temp_loc,
3332 iftable_offset,
3333 maybe_temp2_loc,
3334 kWithoutReadBarrier);
Alexey Frunze66b69ad2017-02-24 00:51:44 -08003335 // Iftable is never null.
3336 __ Lw(TMP, temp, array_length_offset);
3337 // Loop through the iftable and check if any class matches.
3338 MipsLabel loop;
3339 __ Bind(&loop);
3340 __ Addiu(temp, temp, 2 * kHeapReferenceSize); // Possibly in delay slot on R2.
3341 __ Beqz(TMP, slow_path->GetEntryLabel());
3342 __ Lw(AT, temp, object_array_data_offset - 2 * kHeapReferenceSize);
3343 __ MaybeUnpoisonHeapReference(AT);
3344 // Go to next interface.
3345 __ Addiu(TMP, TMP, -2);
3346 // Compare the classes and continue the loop if they do not match.
3347 __ Bne(AT, cls, &loop);
3348 break;
3349 }
3350 }
3351
3352 __ Bind(&done);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003353 __ Bind(slow_path->GetExitLabel());
3354}
3355
3356void LocationsBuilderMIPS::VisitClinitCheck(HClinitCheck* check) {
3357 LocationSummary* locations =
3358 new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3359 locations->SetInAt(0, Location::RequiresRegister());
3360 if (check->HasUses()) {
3361 locations->SetOut(Location::SameAsFirstInput());
3362 }
3363}
3364
3365void InstructionCodeGeneratorMIPS::VisitClinitCheck(HClinitCheck* check) {
3366 // We assume the class is not null.
3367 SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
3368 check->GetLoadClass(),
3369 check,
3370 check->GetDexPc(),
3371 true);
3372 codegen_->AddSlowPath(slow_path);
3373 GenerateClassInitializationCheck(slow_path,
3374 check->GetLocations()->InAt(0).AsRegister<Register>());
3375}
3376
// Register allocation for HCompare: integral and FP inputs produce an
// integer result register (-1/0/1); only the 64-bit path needs an
// overlapping output (see comment below).
void LocationsBuilderMIPS::VisitCompare(HCompare* compare) {
  Primitive::Type in_type = compare->InputAt(0)->GetType();

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);

  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      // Output overlaps because it is written before doing the low comparison.
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // FP operands compared in FPU registers; the -1/0/1 result still goes
      // into a core register.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected type for compare operation " << in_type;
  }
}
3412
// Emits code computing the three-way comparison result for HCompare.
// FP comparisons honor the instruction's gt-bias (result when an operand
// is NaN) and use R6 compare instructions (CMP.cond.fmt into an FPU
// register) or pre-R6 condition-flag instructions (c.cond.fmt / movt).
void InstructionCodeGeneratorMIPS::VisitCompare(HCompare* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register res = locations->Out().AsRegister<Register>();
  Primitive::Type in_type = instruction->InputAt(0)->GetType();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  // 0 if: left == right
  // 1 if: left > right
  // -1 if: left < right
  switch (in_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
    case Primitive::kPrimInt: {
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Register rhs = locations->InAt(1).AsRegister<Register>();
      // (rhs < lhs) - (lhs < rhs) yields exactly -1/0/1.
      __ Slt(TMP, lhs, rhs);
      __ Slt(res, rhs, lhs);
      __ Subu(res, res, TMP);
      break;
    }
    case Primitive::kPrimLong: {
      MipsLabel done;
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();
      // High words decide with a signed compare; only when they are equal
      // do the low words decide, with an unsigned compare.
      // TODO: more efficient (direct) comparison with a constant.
      __ Slt(TMP, lhs_high, rhs_high);
      __ Slt(AT, rhs_high, lhs_high);  // Inverted: is actually gt.
      __ Subu(res, AT, TMP);  // Result -1:1:0 for [ <, >, == ].
      __ Bnez(res, &done);  // If we compared ==, check if lower bits are also equal.
      __ Sltu(TMP, lhs_low, rhs_low);
      __ Sltu(AT, rhs_low, lhs_low);  // Inverted: is actually gt.
      __ Subu(res, AT, TMP);  // Result -1:1:0 for [ <, >, == ].
      __ Bind(&done);
      break;
    }

    case Primitive::kPrimFloat: {
      bool gt_bias = instruction->IsGtBias();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      MipsLabel done;
      if (isR6) {
        // R6: FP compares write all-ones/all-zeros into an FPU register.
        __ CmpEqS(FTMP, lhs, rhs);
        __ LoadConst32(res, 0);
        __ Bc1nez(FTMP, &done);
        if (gt_bias) {
          // NaN falls through both compares and yields 1 (gt bias).
          __ CmpLtS(FTMP, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, 1);
        } else {
          // NaN yields -1 (lt bias).
          __ CmpLtS(FTMP, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, -1);
        }
      } else {
        // Pre-R6: use FP condition flag 0 with c.cond.s / bc1t / movt.
        if (gt_bias) {
          __ ColtS(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1t(0, &done);
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(res, 1);
          __ Movt(res, ZERO, 0);
        } else {
          __ ColtS(0, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1t(0, &done);
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Movt(res, ZERO, 0);
        }
      }
      __ Bind(&done);
      break;
    }
    case Primitive::kPrimDouble: {
      // Same structure as the float case, with double-precision compares.
      bool gt_bias = instruction->IsGtBias();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      MipsLabel done;
      if (isR6) {
        __ CmpEqD(FTMP, lhs, rhs);
        __ LoadConst32(res, 0);
        __ Bc1nez(FTMP, &done);
        if (gt_bias) {
          __ CmpLtD(FTMP, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, 1);
        } else {
          __ CmpLtD(FTMP, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1nez(FTMP, &done);
          __ LoadConst32(res, -1);
        }
      } else {
        if (gt_bias) {
          __ ColtD(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Bc1t(0, &done);
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(res, 1);
          __ Movt(res, ZERO, 0);
        } else {
          __ ColtD(0, rhs, lhs);
          __ LoadConst32(res, 1);
          __ Bc1t(0, &done);
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(res, -1);
          __ Movt(res, ZERO, 0);
        }
      }
      __ Bind(&done);
      break;
    }

    default:
      LOG(FATAL) << "Unimplemented compare type " << in_type;
  }
}
3538
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003539void LocationsBuilderMIPS::HandleCondition(HCondition* instruction) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003540 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003541 switch (instruction->InputAt(0)->GetType()) {
3542 default:
3543 case Primitive::kPrimLong:
3544 locations->SetInAt(0, Location::RequiresRegister());
3545 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
3546 break;
3547
3548 case Primitive::kPrimFloat:
3549 case Primitive::kPrimDouble:
3550 locations->SetInAt(0, Location::RequiresFpuRegister());
3551 locations->SetInAt(1, Location::RequiresFpuRegister());
3552 break;
3553 }
David Brazdilb3e773e2016-01-26 11:28:37 +00003554 if (!instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003555 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3556 }
3557}
3558
Vladimir Marko5f7b58e2015-11-23 19:49:34 +00003559void InstructionCodeGeneratorMIPS::HandleCondition(HCondition* instruction) {
David Brazdilb3e773e2016-01-26 11:28:37 +00003560 if (instruction->IsEmittedAtUseSite()) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003561 return;
3562 }
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003563
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003564 Primitive::Type type = instruction->InputAt(0)->GetType();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003565 LocationSummary* locations = instruction->GetLocations();
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003566
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003567 switch (type) {
3568 default:
3569 // Integer case.
3570 GenerateIntCompare(instruction->GetCondition(), locations);
3571 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003572
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003573 case Primitive::kPrimLong:
Tijana Jakovljevic6d482aa2017-02-03 13:24:08 +01003574 GenerateLongCompare(instruction->GetCondition(), locations);
3575 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003576
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003577 case Primitive::kPrimFloat:
3578 case Primitive::kPrimDouble:
Alexey Frunze2ddb7172016-09-06 17:04:55 -07003579 GenerateFpCompare(instruction->GetCondition(), instruction->IsGtBias(), type, locations);
3580 return;
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003581 }
3582}
3583
Alexey Frunze7e99e052015-11-24 19:28:01 -08003584void InstructionCodeGeneratorMIPS::DivRemOneOrMinusOne(HBinaryOperation* instruction) {
3585 DCHECK(instruction->IsDiv() || instruction->IsRem());
3586 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);
3587
3588 LocationSummary* locations = instruction->GetLocations();
3589 Location second = locations->InAt(1);
3590 DCHECK(second.IsConstant());
3591
3592 Register out = locations->Out().AsRegister<Register>();
3593 Register dividend = locations->InAt(0).AsRegister<Register>();
3594 int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
3595 DCHECK(imm == 1 || imm == -1);
3596
3597 if (instruction->IsRem()) {
3598 __ Move(out, ZERO);
3599 } else {
3600 if (imm == -1) {
3601 __ Subu(out, ZERO, dividend);
3602 } else if (out != dividend) {
3603 __ Move(out, dividend);
3604 }
3605 }
3606}
3607
// Emits int division/remainder by a constant power of two (|imm| == 2^ctz_imm)
// using shifts and masks. The sign of the dividend is folded in via a
// bias (TMP) so that the result rounds toward zero as Java requires.
void InstructionCodeGeneratorMIPS::DivRemByPowerOfTwo(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
  // AbsOrMin keeps INT_MIN as-is; its cast to uint32_t is still 2^31.
  uint32_t abs_imm = static_cast<uint32_t>(AbsOrMin(imm));
  int ctz_imm = CTZ(abs_imm);

  if (instruction->IsDiv()) {
    if (ctz_imm == 1) {
      // Fast path for division by +/-2, which is very common.
      __ Srl(TMP, dividend, 31);
    } else {
      // Bias = (dividend < 0) ? abs_imm - 1 : 0, built from the sign bits.
      __ Sra(TMP, dividend, 31);
      __ Srl(TMP, TMP, 32 - ctz_imm);
    }
    __ Addu(out, dividend, TMP);
    __ Sra(out, out, ctz_imm);
    if (imm < 0) {
      // Negative divisor: negate the quotient.
      __ Subu(out, ZERO, out);
    }
  } else {
    if (ctz_imm == 1) {
      // Fast path for modulo +/-2, which is very common.
      __ Sra(TMP, dividend, 31);
      __ Subu(out, dividend, TMP);
      __ Andi(out, out, 1);
      __ Addu(out, out, TMP);
    } else {
      // out = ((dividend + bias) & (abs_imm - 1)) - bias.
      __ Sra(TMP, dividend, 31);
      __ Srl(TMP, TMP, 32 - ctz_imm);
      __ Addu(out, dividend, TMP);
      if (IsUint<16>(abs_imm - 1)) {
        __ Andi(out, out, abs_imm - 1);
      } else {
        // Mask does not fit the 16-bit immediate: mask via shift left/right.
        __ Sll(out, out, 32 - ctz_imm);
        __ Srl(out, out, 32 - ctz_imm);
      }
      __ Subu(out, out, TMP);
    }
  }
}
3656
// Emits int division/remainder by an arbitrary non-trivial constant using
// the "magic number" multiply-and-shift technique (Hacker's Delight,
// ch. 10): out = ((dividend * magic) >> 32 >> shift) corrected for sign.
void InstructionCodeGeneratorMIPS::GenerateDivRemWithAnyConstant(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Location second = locations->InAt(1);
  DCHECK(second.IsConstant());

  Register out = locations->Out().AsRegister<Register>();
  Register dividend = locations->InAt(0).AsRegister<Register>();
  int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();

  int64_t magic;
  int shift;
  CalculateMagicAndShiftForDivRem(imm, false /* is_long */, &magic, &shift);

  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  // TMP = high 32 bits of dividend * magic.
  __ LoadConst32(TMP, magic);
  if (isR6) {
    __ MuhR6(TMP, dividend, TMP);
  } else {
    __ MultR2(dividend, TMP);
    __ Mfhi(TMP);
  }
  // Correct for the sign of the magic number (see Hacker's Delight).
  if (imm > 0 && magic < 0) {
    __ Addu(TMP, TMP, dividend);
  } else if (imm < 0 && magic > 0) {
    __ Subu(TMP, TMP, dividend);
  }

  if (shift != 0) {
    __ Sra(TMP, TMP, shift);
  }

  if (instruction->IsDiv()) {
    // Add 1 to the quotient if it is negative (round toward zero).
    __ Sra(out, TMP, 31);
    __ Subu(out, TMP, out);
  } else {
    // Remainder: out = dividend - quotient * imm.
    __ Sra(AT, TMP, 31);
    __ Subu(AT, TMP, AT);
    __ LoadConst32(TMP, imm);
    if (isR6) {
      __ MulR6(TMP, AT, TMP);
    } else {
      __ MulR2(TMP, AT, TMP);
    }
    __ Subu(out, dividend, TMP);
  }
}
3707
// Dispatcher for 32-bit integer HDiv/HRem: picks the cheapest emission
// strategy depending on whether the divisor is a constant (and its value),
// falling back to hardware div/mod for a register divisor.
void InstructionCodeGeneratorMIPS::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());
  DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimInt);

  LocationSummary* locations = instruction->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location second = locations->InAt(1);

  if (second.IsConstant()) {
    int32_t imm = second.GetConstant()->AsIntConstant()->GetValue();
    if (imm == 0) {
      // Do not generate anything. DivZeroCheck would prevent any code to be executed.
    } else if (imm == 1 || imm == -1) {
      DivRemOneOrMinusOne(instruction);
    } else if (IsPowerOfTwo(AbsOrMin(imm))) {
      DivRemByPowerOfTwo(instruction);
    } else {
      DCHECK(imm <= -2 || imm >= 2);
      GenerateDivRemWithAnyConstant(instruction);
    }
  } else {
    Register dividend = locations->InAt(0).AsRegister<Register>();
    Register divisor = second.AsRegister<Register>();
    bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
    if (instruction->IsDiv()) {
      if (isR6) {
        __ DivR6(out, dividend, divisor);
      } else {
        __ DivR2(out, dividend, divisor);
      }
    } else {
      if (isR6) {
        __ ModR6(out, dividend, divisor);
      } else {
        __ ModR2(out, dividend, divisor);
      }
    }
  }
}
3747
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003748void LocationsBuilderMIPS::VisitDiv(HDiv* div) {
3749 Primitive::Type type = div->GetResultType();
3750 LocationSummary::CallKind call_kind = (type == Primitive::kPrimLong)
Serban Constantinescu54ff4822016-07-07 18:03:19 +01003751 ? LocationSummary::kCallOnMainOnly
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003752 : LocationSummary::kNoCall;
3753
3754 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
3755
3756 switch (type) {
3757 case Primitive::kPrimInt:
3758 locations->SetInAt(0, Location::RequiresRegister());
Alexey Frunze7e99e052015-11-24 19:28:01 -08003759 locations->SetInAt(1, Location::RegisterOrConstant(div->InputAt(1)));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02003760 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3761 break;
3762
3763 case Primitive::kPrimLong: {
3764 InvokeRuntimeCallingConvention calling_convention;
3765 locations->SetInAt(0, Location::RegisterPairLocation(
3766 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
3767 locations->SetInAt(1, Location::RegisterPairLocation(
3768 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
3769 locations->SetOut(calling_convention.GetReturnLocation(type));
3770 break;
3771 }
3772
3773 case Primitive::kPrimFloat:
3774 case Primitive::kPrimDouble:
3775 locations->SetInAt(0, Location::RequiresFpuRegister());
3776 locations->SetInAt(1, Location::RequiresFpuRegister());
3777 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
3778 break;
3779
3780 default:
3781 LOG(FATAL) << "Unexpected div type " << type;
3782 }
3783}
3784
// Code generation for HDiv: inline for int and FP types, runtime call
// (kQuickLdiv) for 64-bit integers.
void InstructionCodeGeneratorMIPS::VisitDiv(HDiv* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt:
      GenerateDivRemIntegral(instruction);
      break;
    case Primitive::kPrimLong: {
      codegen_->InvokeRuntime(kQuickLdiv, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickLdiv, int64_t, int64_t, int64_t>();
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ DivS(dst, lhs, rhs);
      } else {
        __ DivD(dst, lhs, rhs);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected div type " << type;
  }
}
3814
// Register allocation for HDivZeroCheck; throwing goes through a slow path.
void LocationsBuilderMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  // A constant divisor lets the zero test be resolved at compile time.
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
}
3819
// Emits the divisor-is-zero test; branches to the ArithmeticException slow
// path when the divisor is zero. Constant divisors are resolved statically.
void InstructionCodeGeneratorMIPS::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathMIPS(instruction);
  codegen_->AddSlowPath(slow_path);
  Location value = instruction->GetLocations()->InAt(0);
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt: {
      if (value.IsConstant()) {
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          __ B(slow_path->GetEntryLabel());
        } else {
          // A division by a non-zero constant is valid. We don't need to perform
          // any check, so simply fall through.
        }
      } else {
        DCHECK(value.IsRegister()) << value;
        __ Beqz(value.AsRegister<Register>(), slow_path->GetEntryLabel());
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsConstant()) {
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ B(slow_path->GetEntryLabel());
        } else {
          // A division by a non-zero constant is valid. We don't need to perform
          // any check, so simply fall through.
        }
      } else {
        DCHECK(value.IsRegisterPair()) << value;
        // A 64-bit value is zero iff the OR of its halves is zero.
        __ Or(TMP, value.AsRegisterPairHigh<Register>(), value.AsRegisterPairLow<Register>());
        __ Beqz(TMP, slow_path->GetEntryLabel());
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type " << type << " for DivZeroCheck.";
  }
}
3864
// Double constants live in the instruction itself; no register is needed.
void LocationsBuilderMIPS::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
3870
// No code emitted here: the constant is materialized where it is used.
void InstructionCodeGeneratorMIPS::VisitDoubleConstant(HDoubleConstant* cst ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3874
// HExit needs no locations.
void LocationsBuilderMIPS::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
3878
// HExit emits no code; the exit block is the graph's sink.
void InstructionCodeGeneratorMIPS::VisitExit(HExit* exit ATTRIBUTE_UNUSED) {
}
3881
// Float constants live in the instruction itself; no register is needed.
void LocationsBuilderMIPS::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
3887
// No code emitted here: the constant is materialized where it is used.
void InstructionCodeGeneratorMIPS::VisitFloatConstant(HFloatConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
3891
// HGoto needs no locations.
void LocationsBuilderMIPS::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
3895
// Shared emission for unconditional control transfer (HGoto/HTryBoundary):
// inserts suspend checks on loop back edges and after the entry block, and
// emits a branch only when the successor is not the fall-through block.
void InstructionCodeGeneratorMIPS::HandleGoto(HInstruction* got, HBasicBlock* successor) {
  DCHECK(!successor->IsExitBlock());
  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();
  HLoopInformation* info = block->GetLoopInformation();

  if (info != nullptr && info->IsBackEdge(*block) && info->HasSuspendCheck()) {
    // Back edge with a suspend check: emit it (it also branches to the
    // successor), after clearing spill slots of loop phis in the stack map.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }
  // Suspend check placed at the method entry.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // Skip the branch when the successor is laid out immediately after.
  if (!codegen_->GoesToNextBlock(block, successor)) {
    __ B(codegen_->GetLabelOf(successor));
  }
}
3914
// HGoto: delegate to the shared unconditional-branch emission.
void InstructionCodeGeneratorMIPS::VisitGoto(HGoto* got) {
  HandleGoto(got, got->GetSuccessor());
}
3918
// HTryBoundary needs no locations.
void LocationsBuilderMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
  try_boundary->SetLocations(nullptr);
}
3922
// HTryBoundary: branch to the normal-flow successor unless it is the exit
// block (exceptional successors are handled elsewhere).
void InstructionCodeGeneratorMIPS::VisitTryBoundary(HTryBoundary* try_boundary) {
  HBasicBlock* successor = try_boundary->GetNormalFlowSuccessor();
  if (!successor->IsExitBlock()) {
    HandleGoto(try_boundary, successor);
  }
}
3929
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003930void InstructionCodeGeneratorMIPS::GenerateIntCompare(IfCondition cond,
3931 LocationSummary* locations) {
3932 Register dst = locations->Out().AsRegister<Register>();
3933 Register lhs = locations->InAt(0).AsRegister<Register>();
3934 Location rhs_location = locations->InAt(1);
3935 Register rhs_reg = ZERO;
3936 int64_t rhs_imm = 0;
3937 bool use_imm = rhs_location.IsConstant();
3938 if (use_imm) {
3939 rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
3940 } else {
3941 rhs_reg = rhs_location.AsRegister<Register>();
3942 }
3943
3944 switch (cond) {
3945 case kCondEQ:
3946 case kCondNE:
Alexey Frunzee7697712016-09-15 21:37:49 -07003947 if (use_imm && IsInt<16>(-rhs_imm)) {
3948 if (rhs_imm == 0) {
3949 if (cond == kCondEQ) {
3950 __ Sltiu(dst, lhs, 1);
3951 } else {
3952 __ Sltu(dst, ZERO, lhs);
3953 }
3954 } else {
3955 __ Addiu(dst, lhs, -rhs_imm);
3956 if (cond == kCondEQ) {
3957 __ Sltiu(dst, dst, 1);
3958 } else {
3959 __ Sltu(dst, ZERO, dst);
3960 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003961 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003962 } else {
Alexey Frunzee7697712016-09-15 21:37:49 -07003963 if (use_imm && IsUint<16>(rhs_imm)) {
3964 __ Xori(dst, lhs, rhs_imm);
3965 } else {
3966 if (use_imm) {
3967 rhs_reg = TMP;
3968 __ LoadConst32(rhs_reg, rhs_imm);
3969 }
3970 __ Xor(dst, lhs, rhs_reg);
3971 }
3972 if (cond == kCondEQ) {
3973 __ Sltiu(dst, dst, 1);
3974 } else {
3975 __ Sltu(dst, ZERO, dst);
3976 }
Alexey Frunzecd7b0ee2015-12-03 16:46:38 -08003977 }
3978 break;
3979
3980 case kCondLT:
3981 case kCondGE:
3982 if (use_imm && IsInt<16>(rhs_imm)) {
3983 __ Slti(dst, lhs, rhs_imm);
3984 } else {
3985 if (use_imm) {
3986 rhs_reg = TMP;
3987 __ LoadConst32(rhs_reg, rhs_imm);
3988 }
3989 __ Slt(dst, lhs, rhs_reg);
3990 }
3991 if (cond == kCondGE) {
3992 // Simulate lhs >= rhs via !(lhs < rhs) since there's
3993 // only the slt instruction but no sge.
3994 __ Xori(dst, dst, 1);
3995 }
3996 break;
3997
3998 case kCondLE:
3999 case kCondGT:
4000 if (use_imm && IsInt<16>(rhs_imm + 1)) {
4001 // Simulate lhs <= rhs via lhs < rhs + 1.
4002 __ Slti(dst, lhs, rhs_imm + 1);
4003 if (cond == kCondGT) {
4004 // Simulate lhs > rhs via !(lhs <= rhs) since there's
4005 // only the slti instruction but no sgti.
4006 __ Xori(dst, dst, 1);
4007 }
4008 } else {
4009 if (use_imm) {
4010 rhs_reg = TMP;
4011 __ LoadConst32(rhs_reg, rhs_imm);
4012 }
4013 __ Slt(dst, rhs_reg, lhs);
4014 if (cond == kCondLE) {
4015 // Simulate lhs <= rhs via !(rhs < lhs) since there's
4016 // only the slt instruction but no sle.
4017 __ Xori(dst, dst, 1);
4018 }
4019 }
4020 break;
4021
4022 case kCondB:
4023 case kCondAE:
4024 if (use_imm && IsInt<16>(rhs_imm)) {
4025 // Sltiu sign-extends its 16-bit immediate operand before
4026 // the comparison and thus lets us compare directly with
4027 // unsigned values in the ranges [0, 0x7fff] and
4028 // [0xffff8000, 0xffffffff].
4029 __ Sltiu(dst, lhs, rhs_imm);
4030 } else {
4031 if (use_imm) {
4032 rhs_reg = TMP;
4033 __ LoadConst32(rhs_reg, rhs_imm);
4034 }
4035 __ Sltu(dst, lhs, rhs_reg);
4036 }
4037 if (cond == kCondAE) {
4038 // Simulate lhs >= rhs via !(lhs < rhs) since there's
4039 // only the sltu instruction but no sgeu.
4040 __ Xori(dst, dst, 1);
4041 }
4042 break;
4043
4044 case kCondBE:
4045 case kCondA:
4046 if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
4047 // Simulate lhs <= rhs via lhs < rhs + 1.
4048 // Note that this only works if rhs + 1 does not overflow
4049 // to 0, hence the check above.
4050 // Sltiu sign-extends its 16-bit immediate operand before
4051 // the comparison and thus lets us compare directly with
4052 // unsigned values in the ranges [0, 0x7fff] and
4053 // [0xffff8000, 0xffffffff].
4054 __ Sltiu(dst, lhs, rhs_imm + 1);
4055 if (cond == kCondA) {
4056 // Simulate lhs > rhs via !(lhs <= rhs) since there's
4057 // only the sltiu instruction but no sgtiu.
4058 __ Xori(dst, dst, 1);
4059 }
4060 } else {
4061 if (use_imm) {
4062 rhs_reg = TMP;
4063 __ LoadConst32(rhs_reg, rhs_imm);
4064 }
4065 __ Sltu(dst, rhs_reg, lhs);
4066 if (cond == kCondBE) {
4067 // Simulate lhs <= rhs via !(rhs < lhs) since there's
4068 // only the sltu instruction but no sleu.
4069 __ Xori(dst, dst, 1);
4070 }
4071 }
4072 break;
4073 }
4074}
4075
// Materializes the result of a 32-bit integer comparison into `dst`.
// Emits the cheapest compare sequence for `lhs` vs the second input (register
// or constant). Returns true when `dst` ends up holding the *negation* of
// `cond` (i.e. the condition holds iff `dst` is zero), so the caller must
// invert its interpretation of `dst`; returns false when `dst` directly
// represents the condition (non-zero iff it holds).
bool InstructionCodeGeneratorMIPS::MaterializeIntCompare(IfCondition cond,
                                                         LocationSummary* input_locations,
                                                         Register dst) {
  Register lhs = input_locations->InAt(0).AsRegister<Register>();
  Location rhs_location = input_locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  switch (cond) {
    case kCondEQ:
    case kCondNE:
      // Make dst zero iff lhs == rhs: subtract (via addiu of the negated
      // immediate), xor with a small immediate, or xor with a register.
      if (use_imm && IsInt<16>(-rhs_imm)) {
        __ Addiu(dst, lhs, -rhs_imm);
      } else if (use_imm && IsUint<16>(rhs_imm)) {
        __ Xori(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Xor(dst, lhs, rhs_reg);
      }
      // dst is zero iff equal, i.e. it directly encodes NE; EQ is the negation.
      return (cond == kCondEQ);

    case kCondLT:
    case kCondGE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        __ Slti(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Slt(dst, lhs, rhs_reg);
      }
      // dst encodes lhs < rhs; GE is its negation.
      return (cond == kCondGE);

    case kCondLE:
    case kCondGT:
      if (use_imm && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        __ Slti(dst, lhs, rhs_imm + 1);
        return (cond == kCondGT);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        // dst encodes rhs < lhs, i.e. GT; LE is its negation.
        __ Slt(dst, rhs_reg, lhs);
        return (cond == kCondLE);
      }

    case kCondB:
    case kCondAE:
      if (use_imm && IsInt<16>(rhs_imm)) {
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        __ Sltu(dst, lhs, rhs_reg);
      }
      // dst encodes lhs <u rhs; AE is its negation.
      return (cond == kCondAE);

    case kCondBE:
    case kCondA:
      if (use_imm && (rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
        // Simulate lhs <= rhs via lhs < rhs + 1.
        // Note that this only works if rhs + 1 does not overflow
        // to 0, hence the check above.
        // Sltiu sign-extends its 16-bit immediate operand before
        // the comparison and thus lets us compare directly with
        // unsigned values in the ranges [0, 0x7fff] and
        // [0xffff8000, 0xffffffff].
        __ Sltiu(dst, lhs, rhs_imm + 1);
        return (cond == kCondA);
      } else {
        if (use_imm) {
          rhs_reg = TMP;
          __ LoadConst32(rhs_reg, rhs_imm);
        }
        // dst encodes rhs <u lhs, i.e. A; BE is its negation.
        __ Sltu(dst, rhs_reg, lhs);
        return (cond == kCondBE);
      }
  }
}
4173
// Emits a 32-bit integer compare-and-branch to `label` when `cond` holds.
// Fast paths: comparisons against zero use the single-register branch forms;
// on R2 with a constant operand, small immediates use slti/sltiu plus a
// branch-on-zero/non-zero instead of materializing the constant.
void InstructionCodeGeneratorMIPS::GenerateIntCompareAndBranch(IfCondition cond,
                                                               LocationSummary* locations,
                                                               MipsLabel* label) {
  Register lhs = locations->InAt(0).AsRegister<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_reg = ZERO;
  int64_t rhs_imm = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    rhs_imm = CodeGenerator::GetInt32ValueOf(rhs_location.GetConstant());
  } else {
    rhs_reg = rhs_location.AsRegister<Register>();
  }

  if (use_imm && rhs_imm == 0) {
    // Comparison against zero: use the dedicated branch-on-zero/sign forms.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Beqz(lhs, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Bnez(lhs, label);
        break;
      case kCondLT:
        __ Bltz(lhs, label);
        break;
      case kCondGE:
        __ Bgez(lhs, label);
        break;
      case kCondLE:
        __ Blez(lhs, label);
        break;
      case kCondGT:
        __ Bgtz(lhs, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ B(label);
        break;
    }
  } else {
    bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
    if (isR6 || !use_imm) {
      // R6 (or register operand): load the constant if needed and use the
      // two-register pseudo-branches directly.
      if (use_imm) {
        rhs_reg = TMP;
        __ LoadConst32(rhs_reg, rhs_imm);
      }
      switch (cond) {
        case kCondEQ:
          __ Beq(lhs, rhs_reg, label);
          break;
        case kCondNE:
          __ Bne(lhs, rhs_reg, label);
          break;
        case kCondLT:
          __ Blt(lhs, rhs_reg, label);
          break;
        case kCondGE:
          __ Bge(lhs, rhs_reg, label);
          break;
        case kCondLE:
          __ Bge(rhs_reg, lhs, label);
          break;
        case kCondGT:
          __ Blt(rhs_reg, lhs, label);
          break;
        case kCondB:
          __ Bltu(lhs, rhs_reg, label);
          break;
        case kCondAE:
          __ Bgeu(lhs, rhs_reg, label);
          break;
        case kCondBE:
          __ Bgeu(rhs_reg, lhs, label);
          break;
        case kCondA:
          __ Bltu(rhs_reg, lhs, label);
          break;
      }
    } else {
      // Special cases for more efficient comparison with constants on R2.
      // Where the immediate fits, materialize the relation with slti/sltiu
      // into TMP and branch on TMP being zero/non-zero.
      switch (cond) {
        case kCondEQ:
          __ LoadConst32(TMP, rhs_imm);
          __ Beq(lhs, TMP, label);
          break;
        case kCondNE:
          __ LoadConst32(TMP, rhs_imm);
          __ Bne(lhs, TMP, label);
          break;
        case kCondLT:
          if (IsInt<16>(rhs_imm)) {
            __ Slti(TMP, lhs, rhs_imm);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Blt(lhs, TMP, label);
          }
          break;
        case kCondGE:
          if (IsInt<16>(rhs_imm)) {
            __ Slti(TMP, lhs, rhs_imm);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bge(lhs, TMP, label);
          }
          break;
        case kCondLE:
          if (IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs <= rhs via lhs < rhs + 1.
            __ Slti(TMP, lhs, rhs_imm + 1);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bge(TMP, lhs, label);
          }
          break;
        case kCondGT:
          if (IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs > rhs via !(lhs < rhs + 1).
            __ Slti(TMP, lhs, rhs_imm + 1);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Blt(TMP, lhs, label);
          }
          break;
        case kCondB:
          if (IsInt<16>(rhs_imm)) {
            __ Sltiu(TMP, lhs, rhs_imm);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bltu(lhs, TMP, label);
          }
          break;
        case kCondAE:
          if (IsInt<16>(rhs_imm)) {
            __ Sltiu(TMP, lhs, rhs_imm);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bgeu(lhs, TMP, label);
          }
          break;
        case kCondBE:
          if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs <= rhs via lhs < rhs + 1.
            // Note that this only works if rhs + 1 does not overflow
            // to 0, hence the check above.
            __ Sltiu(TMP, lhs, rhs_imm + 1);
            __ Bnez(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bgeu(TMP, lhs, label);
          }
          break;
        case kCondA:
          if ((rhs_imm != -1) && IsInt<16>(rhs_imm + 1)) {
            // Simulate lhs > rhs via !(lhs < rhs + 1).
            // Note that this only works if rhs + 1 does not overflow
            // to 0, hence the check above.
            __ Sltiu(TMP, lhs, rhs_imm + 1);
            __ Beqz(TMP, label);
          } else {
            __ LoadConst32(TMP, rhs_imm);
            __ Bltu(TMP, lhs, label);
          }
          break;
      }
    }
  }
}
4350
// Materializes the result (0 or 1) of a 64-bit comparison into `dst`.
// The long operands live in register pairs (high/low words); clobbers TMP
// and AT as scratch. The general signed/unsigned pattern is:
//   result = (high-word strict compare) || (high words equal && low-word
//   unsigned compare), with the "equal && low" term computed branch-free as
//   Slt(dst, <reverse high compare>, <low compare>).
void InstructionCodeGeneratorMIPS::GenerateLongCompare(IfCondition cond,
                                                       LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_high = ZERO;
  Register rhs_low = ZERO;
  int64_t imm = 0;
  uint32_t imm_high = 0;
  uint32_t imm_low = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
    imm_high = High32Bits(imm);
    imm_low = Low32Bits(imm);
  } else {
    rhs_high = rhs_location.AsRegisterPairHigh<Register>();
    rhs_low = rhs_location.AsRegisterPairLow<Register>();
  }
  if (use_imm && imm == 0) {
    // Comparison against zero: fold the pair with Or, or test the sign of
    // the high word.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Or(dst, lhs_high, lhs_low);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Or(dst, lhs_high, lhs_low);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
        __ Slt(dst, lhs_high, ZERO);
        break;
      case kCondGE:
        __ Slt(dst, lhs_high, ZERO);
        __ Xori(dst, dst, 1);
        break;
      case kCondLE:
        // lhs <= 0 iff sign-extension of the high word (AT) >= (high|low):
        // AT is -1 for negative (always >= unsigned is false -> invert),
        // and 0 for non-negative, where (high|low) != 0 means > 0.
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Sltu(dst, AT, TMP);
        __ Xori(dst, dst, 1);
        break;
      case kCondGT:
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Sltu(dst, AT, TMP);
        break;
      case kCondB:  // always false
        __ Andi(dst, dst, 0);
        break;
      case kCondAE:  // always true
        __ Ori(dst, ZERO, 1);
        break;
    }
  } else if (use_imm) {
    // TODO: more efficient comparison with constants without loading them into TMP/AT.
    switch (cond) {
      case kCondEQ:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(dst, TMP, AT);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(dst, TMP, AT);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
      case kCondGE:
        // If dst aliases lhs_low, do the low-word compare first so loading
        // imm_low into dst later cannot clobber lhs_low before it is read.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, lhs_low, TMP);
        }
        __ LoadConst32(TMP, imm_high);
        __ Slt(AT, lhs_high, TMP);   // AT = lhs_high < imm_high.
        __ Slt(TMP, TMP, lhs_high);  // TMP = imm_high < lhs_high.
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, lhs_low, dst);
        }
        // dst = (high words equal) && (lhs_low <u imm_low), then OR in the
        // strict high-word result; invert for GE.
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondGE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondGT:
      case kCondLE:
        // Same scheme with the operand order reversed; invert for LE.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, TMP, lhs_low);
        }
        __ LoadConst32(TMP, imm_high);
        __ Slt(AT, TMP, lhs_high);
        __ Slt(TMP, lhs_high, TMP);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, dst, lhs_low);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondLE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondB:
      case kCondAE:
        // Unsigned variant: high words compared with Sltu; invert for AE.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, lhs_low, TMP);
        }
        __ LoadConst32(TMP, imm_high);
        __ Sltu(AT, lhs_high, TMP);
        __ Sltu(TMP, TMP, lhs_high);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, lhs_low, dst);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondAE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondA:
      case kCondBE:
        // Unsigned variant, reversed operand order; invert for BE.
        if (dst == lhs_low) {
          __ LoadConst32(TMP, imm_low);
          __ Sltu(dst, TMP, lhs_low);
        }
        __ LoadConst32(TMP, imm_high);
        __ Sltu(AT, TMP, lhs_high);
        __ Sltu(TMP, lhs_high, TMP);
        if (dst != lhs_low) {
          __ LoadConst32(dst, imm_low);
          __ Sltu(dst, dst, lhs_low);
        }
        __ Slt(dst, TMP, dst);
        __ Or(dst, dst, AT);
        if (cond == kCondBE) {
          __ Xori(dst, dst, 1);
        }
        break;
    }
  } else {
    // Register-register comparison.
    switch (cond) {
      case kCondEQ:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(dst, TMP, AT);
        __ Sltiu(dst, dst, 1);
        break;
      case kCondNE:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(dst, TMP, AT);
        __ Sltu(dst, ZERO, dst);
        break;
      case kCondLT:
      case kCondGE:
        // dst = (lhs_high < rhs_high) ||
        //       (high words equal && lhs_low <u rhs_low); invert for GE.
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Slt(TMP, TMP, AT);
        __ Slt(AT, lhs_high, rhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondGE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondGT:
      case kCondLE:
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Slt(TMP, TMP, AT);
        __ Slt(AT, rhs_high, lhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondLE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondB:
      case kCondAE:
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Slt(TMP, TMP, AT);
        __ Sltu(AT, lhs_high, rhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondAE) {
          __ Xori(dst, dst, 1);
        }
        break;
      case kCondA:
      case kCondBE:
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Slt(TMP, TMP, AT);
        __ Sltu(AT, rhs_high, lhs_high);
        __ Or(dst, AT, TMP);
        if (cond == kCondBE) {
          __ Xori(dst, dst, 1);
        }
        break;
    }
  }
}
4565
// Emits a 64-bit compare-and-branch to `label` when `cond` holds.
// The long operands live in register pairs; clobbers TMP and AT.
// For ordered conditions the general pattern is two steps: first branch on
// the strict high-word relation, then (when the high words may be equal)
// combine "reverse high compare" with the unsigned low-word compare so the
// second branch fires only when the high words are equal and the low words
// decide.
void InstructionCodeGeneratorMIPS::GenerateLongCompareAndBranch(IfCondition cond,
                                                                LocationSummary* locations,
                                                                MipsLabel* label) {
  Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Location rhs_location = locations->InAt(1);
  Register rhs_high = ZERO;
  Register rhs_low = ZERO;
  int64_t imm = 0;
  uint32_t imm_high = 0;
  uint32_t imm_low = 0;
  bool use_imm = rhs_location.IsConstant();
  if (use_imm) {
    imm = rhs_location.GetConstant()->AsLongConstant()->GetValue();
    imm_high = High32Bits(imm);
    imm_low = Low32Bits(imm);
  } else {
    rhs_high = rhs_location.AsRegisterPairHigh<Register>();
    rhs_low = rhs_location.AsRegisterPairLow<Register>();
  }

  if (use_imm && imm == 0) {
    // Comparison against zero.
    switch (cond) {
      case kCondEQ:
      case kCondBE:  // <= 0 if zero
        __ Or(TMP, lhs_high, lhs_low);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
      case kCondA:  // > 0 if non-zero
        __ Or(TMP, lhs_high, lhs_low);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        __ Bltz(lhs_high, label);
        break;
      case kCondGE:
        __ Bgez(lhs_high, label);
        break;
      case kCondLE:
        // AT = sign extension of the high word; AT >=u (high|low) holds for
        // any negative value and for zero.
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Bgeu(AT, TMP, label);
        break;
      case kCondGT:
        __ Or(TMP, lhs_high, lhs_low);
        __ Sra(AT, lhs_high, 31);
        __ Bltu(AT, TMP, label);
        break;
      case kCondB:  // always false
        break;
      case kCondAE:  // always true
        __ B(label);
        break;
    }
  } else if (use_imm) {
    // TODO: more efficient comparison with constants without loading them into TMP/AT.
    switch (cond) {
      case kCondEQ:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
        __ LoadConst32(TMP, imm_high);
        __ Xor(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Xor(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        // Taken if high words decide (<), or high words equal (TMP == 0)
        // and lhs_low <u imm_low (AT == 1).
        __ LoadConst32(TMP, imm_high);
        __ Blt(lhs_high, TMP, label);
        __ Slt(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Blt(TMP, AT, label);
        break;
      case kCondGE:
        // GE == !LT: branch if high words are greater; otherwise fall
        // through to label only when neither high-less nor low-less holds.
        __ LoadConst32(TMP, imm_high);
        __ Blt(TMP, lhs_high, label);
        __ Slt(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondLE:
        // LE == !GT, computed analogously with reversed low-word compare.
        __ LoadConst32(TMP, imm_high);
        __ Blt(lhs_high, TMP, label);
        __ Slt(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondGT:
        __ LoadConst32(TMP, imm_high);
        __ Blt(TMP, lhs_high, label);
        __ Slt(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondB:
        // Unsigned variants of the patterns above (Bltu/Sltu on high words).
        __ LoadConst32(TMP, imm_high);
        __ Bltu(lhs_high, TMP, label);
        __ Sltu(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Blt(TMP, AT, label);
        break;
      case kCondAE:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(TMP, lhs_high, label);
        __ Sltu(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, lhs_low, AT);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondBE:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(lhs_high, TMP, label);
        __ Sltu(TMP, TMP, lhs_high);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondA:
        __ LoadConst32(TMP, imm_high);
        __ Bltu(TMP, lhs_high, label);
        __ Sltu(TMP, lhs_high, TMP);
        __ LoadConst32(AT, imm_low);
        __ Sltu(AT, AT, lhs_low);
        __ Blt(TMP, AT, label);
        break;
    }
  } else {
    // Register-register comparison: same two-step high/low branch patterns.
    switch (cond) {
      case kCondEQ:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondNE:
        __ Xor(TMP, lhs_high, rhs_high);
        __ Xor(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Bnez(TMP, label);
        break;
      case kCondLT:
        __ Blt(lhs_high, rhs_high, label);
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondGE:
        __ Blt(rhs_high, lhs_high, label);
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondLE:
        __ Blt(lhs_high, rhs_high, label);
        __ Slt(TMP, rhs_high, lhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondGT:
        __ Blt(rhs_high, lhs_high, label);
        __ Slt(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondB:
        __ Bltu(lhs_high, rhs_high, label);
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Blt(TMP, AT, label);
        break;
      case kCondAE:
        __ Bltu(rhs_high, lhs_high, label);
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, lhs_low, rhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondBE:
        __ Bltu(lhs_high, rhs_high, label);
        __ Sltu(TMP, rhs_high, lhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Or(TMP, TMP, AT);
        __ Beqz(TMP, label);
        break;
      case kCondA:
        __ Bltu(rhs_high, lhs_high, label);
        __ Sltu(TMP, lhs_high, rhs_high);
        __ Sltu(AT, rhs_low, lhs_low);
        __ Blt(TMP, AT, label);
        break;
    }
  }
}
4778
// Materializes the result (0 or 1) of a float/double comparison into the
// output GPR. `gt_bias` selects how NaN is treated (HCompare's bias):
// with gt_bias the ordered compare forms are used so NaN yields false for
// the "less" direction; without it the unordered forms are used.
// R6: the CMP.cond.fmt instructions write an all-ones/all-zeros mask into
// FTMP, which is moved to the GPR and reduced to 0/1. R2: the classic
// C.cond.fmt instructions set condition flag 0, which is materialized with
// Movf/Movt on a preloaded 1.
void InstructionCodeGeneratorMIPS::GenerateFpCompare(IfCondition cond,
                                                     bool gt_bias,
                                                     Primitive::Type type,
                                                     LocationSummary* locations) {
  Register dst = locations->Out().AsRegister<Register>();
  FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  if (type == Primitive::kPrimFloat) {
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondNE:
          // The compare mask is all ones (-1) when equal, so adding 1
          // yields 0 for equal and 1 for not-equal.
          __ CmpEqS(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Addiu(dst, dst, 1);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtS(FTMP, lhs, rhs);
          } else {
            __ CmpUltS(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeS(FTMP, lhs, rhs);
          } else {
            __ CmpUleS(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGT:
          // GT/GE are formed by swapping the operands of the LT/LE compares.
          if (gt_bias) {
            __ CmpUltS(FTMP, rhs, lhs);
          } else {
            __ CmpLtS(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleS(FTMP, rhs, lhs);
          } else {
            __ CmpLeS(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    } else {
      // R2: load 1 and conditionally clear it on the FP condition flag.
      switch (cond) {
        case kCondEQ:
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondNE:
          __ CeqS(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movt(dst, ZERO, 0);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtS(0, lhs, rhs);
          } else {
            __ CultS(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeS(0, lhs, rhs);
          } else {
            __ CuleS(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultS(0, rhs, lhs);
          } else {
            __ ColtS(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleS(0, rhs, lhs);
          } else {
            __ ColeS(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondNE:
          // See the float NE case: mask is -1 when equal, so +1 inverts.
          __ CmpEqD(FTMP, lhs, rhs);
          __ Mfc1(dst, FTMP);
          __ Addiu(dst, dst, 1);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtD(FTMP, lhs, rhs);
          } else {
            __ CmpUltD(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeD(FTMP, lhs, rhs);
          } else {
            __ CmpUleD(FTMP, lhs, rhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CmpUltD(FTMP, rhs, lhs);
          } else {
            __ CmpLtD(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleD(FTMP, rhs, lhs);
          } else {
            __ CmpLeD(FTMP, rhs, lhs);
          }
          __ Mfc1(dst, FTMP);
          __ Andi(dst, dst, 1);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondNE:
          __ CeqD(0, lhs, rhs);
          __ LoadConst32(dst, 1);
          __ Movt(dst, ZERO, 0);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtD(0, lhs, rhs);
          } else {
            __ CultD(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeD(0, lhs, rhs);
          } else {
            __ CuleD(0, lhs, rhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultD(0, rhs, lhs);
          } else {
            __ ColtD(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleD(0, rhs, lhs);
          } else {
            __ ColeD(0, rhs, lhs);
          }
          __ LoadConst32(dst, 1);
          __ Movf(dst, ZERO, 0);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition " << cond;
          UNREACHABLE();
      }
    }
  }
}
5002
Alexey Frunze674b9ee2016-09-20 14:54:15 -07005003bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR2(IfCondition cond,
5004 bool gt_bias,
5005 Primitive::Type type,
5006 LocationSummary* input_locations,
5007 int cc) {
5008 FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
5009 FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
5010 CHECK(!codegen_->GetInstructionSetFeatures().IsR6());
5011 if (type == Primitive::kPrimFloat) {
5012 switch (cond) {
5013 case kCondEQ:
5014 __ CeqS(cc, lhs, rhs);
5015 return false;
5016 case kCondNE:
5017 __ CeqS(cc, lhs, rhs);
5018 return true;
5019 case kCondLT:
5020 if (gt_bias) {
5021 __ ColtS(cc, lhs, rhs);
5022 } else {
5023 __ CultS(cc, lhs, rhs);
5024 }
5025 return false;
5026 case kCondLE:
5027 if (gt_bias) {
5028 __ ColeS(cc, lhs, rhs);
5029 } else {
5030 __ CuleS(cc, lhs, rhs);
5031 }
5032 return false;
5033 case kCondGT:
5034 if (gt_bias) {
5035 __ CultS(cc, rhs, lhs);
5036 } else {
5037 __ ColtS(cc, rhs, lhs);
5038 }
5039 return false;
5040 case kCondGE:
5041 if (gt_bias) {
5042 __ CuleS(cc, rhs, lhs);
5043 } else {
5044 __ ColeS(cc, rhs, lhs);
5045 }
5046 return false;
5047 default:
5048 LOG(FATAL) << "Unexpected non-floating-point condition";
5049 UNREACHABLE();
5050 }
5051 } else {
5052 DCHECK_EQ(type, Primitive::kPrimDouble);
5053 switch (cond) {
5054 case kCondEQ:
5055 __ CeqD(cc, lhs, rhs);
5056 return false;
5057 case kCondNE:
5058 __ CeqD(cc, lhs, rhs);
5059 return true;
5060 case kCondLT:
5061 if (gt_bias) {
5062 __ ColtD(cc, lhs, rhs);
5063 } else {
5064 __ CultD(cc, lhs, rhs);
5065 }
5066 return false;
5067 case kCondLE:
5068 if (gt_bias) {
5069 __ ColeD(cc, lhs, rhs);
5070 } else {
5071 __ CuleD(cc, lhs, rhs);
5072 }
5073 return false;
5074 case kCondGT:
5075 if (gt_bias) {
5076 __ CultD(cc, rhs, lhs);
5077 } else {
5078 __ ColtD(cc, rhs, lhs);
5079 }
5080 return false;
5081 case kCondGE:
5082 if (gt_bias) {
5083 __ CuleD(cc, rhs, lhs);
5084 } else {
5085 __ ColeD(cc, rhs, lhs);
5086 }
5087 return false;
5088 default:
5089 LOG(FATAL) << "Unexpected non-floating-point condition";
5090 UNREACHABLE();
5091 }
5092 }
5093}
5094
5095bool InstructionCodeGeneratorMIPS::MaterializeFpCompareR6(IfCondition cond,
5096 bool gt_bias,
5097 Primitive::Type type,
5098 LocationSummary* input_locations,
5099 FRegister dst) {
5100 FRegister lhs = input_locations->InAt(0).AsFpuRegister<FRegister>();
5101 FRegister rhs = input_locations->InAt(1).AsFpuRegister<FRegister>();
5102 CHECK(codegen_->GetInstructionSetFeatures().IsR6());
5103 if (type == Primitive::kPrimFloat) {
5104 switch (cond) {
5105 case kCondEQ:
5106 __ CmpEqS(dst, lhs, rhs);
5107 return false;
5108 case kCondNE:
5109 __ CmpEqS(dst, lhs, rhs);
5110 return true;
5111 case kCondLT:
5112 if (gt_bias) {
5113 __ CmpLtS(dst, lhs, rhs);
5114 } else {
5115 __ CmpUltS(dst, lhs, rhs);
5116 }
5117 return false;
5118 case kCondLE:
5119 if (gt_bias) {
5120 __ CmpLeS(dst, lhs, rhs);
5121 } else {
5122 __ CmpUleS(dst, lhs, rhs);
5123 }
5124 return false;
5125 case kCondGT:
5126 if (gt_bias) {
5127 __ CmpUltS(dst, rhs, lhs);
5128 } else {
5129 __ CmpLtS(dst, rhs, lhs);
5130 }
5131 return false;
5132 case kCondGE:
5133 if (gt_bias) {
5134 __ CmpUleS(dst, rhs, lhs);
5135 } else {
5136 __ CmpLeS(dst, rhs, lhs);
5137 }
5138 return false;
5139 default:
5140 LOG(FATAL) << "Unexpected non-floating-point condition";
5141 UNREACHABLE();
5142 }
5143 } else {
5144 DCHECK_EQ(type, Primitive::kPrimDouble);
5145 switch (cond) {
5146 case kCondEQ:
5147 __ CmpEqD(dst, lhs, rhs);
5148 return false;
5149 case kCondNE:
5150 __ CmpEqD(dst, lhs, rhs);
5151 return true;
5152 case kCondLT:
5153 if (gt_bias) {
5154 __ CmpLtD(dst, lhs, rhs);
5155 } else {
5156 __ CmpUltD(dst, lhs, rhs);
5157 }
5158 return false;
5159 case kCondLE:
5160 if (gt_bias) {
5161 __ CmpLeD(dst, lhs, rhs);
5162 } else {
5163 __ CmpUleD(dst, lhs, rhs);
5164 }
5165 return false;
5166 case kCondGT:
5167 if (gt_bias) {
5168 __ CmpUltD(dst, rhs, lhs);
5169 } else {
5170 __ CmpLtD(dst, rhs, lhs);
5171 }
5172 return false;
5173 case kCondGE:
5174 if (gt_bias) {
5175 __ CmpUleD(dst, rhs, lhs);
5176 } else {
5177 __ CmpLeD(dst, rhs, lhs);
5178 }
5179 return false;
5180 default:
5181 LOG(FATAL) << "Unexpected non-floating-point condition";
5182 UNREACHABLE();
5183 }
5184 }
5185}
5186
// Emits a floating-point compare-and-branch: branches to `label` when the
// condition `cond` holds for the two FP inputs in `locations`.
//
// `gt_bias` selects how an unordered comparison (a NaN operand) behaves: with
// gt_bias, NaN is biased towards "greater than" — e.g. kCondLT uses an ordered
// compare (false on NaN) while kCondGT uses an unordered one (true on NaN).
// Without gt_bias the bias is towards "less than" and the choices are swapped.
//
// On R6 the comparison result is produced in FTMP via CMP.cond.fmt and tested
// with BC1NEZ/BC1EQZ; on pre-R6 it is produced in FP condition-code flag 0 via
// C.cond.fmt and tested with BC1T/BC1F.
void InstructionCodeGeneratorMIPS::GenerateFpCompareAndBranch(IfCondition cond,
                                                              bool gt_bias,
                                                              Primitive::Type type,
                                                              LocationSummary* locations,
                                                              MipsLabel* label) {
  FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
  FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  if (type == Primitive::kPrimFloat) {
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqS(FTMP, lhs, rhs);
          __ Bc1nez(FTMP, label);
          break;
        case kCondNE:
          // NE is EQ with the branch sense inverted.
          __ CmpEqS(FTMP, lhs, rhs);
          __ Bc1eqz(FTMP, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtS(FTMP, lhs, rhs);
          } else {
            __ CmpUltS(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeS(FTMP, lhs, rhs);
          } else {
            __ CmpUleS(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGT:
          // a > b  <=>  b < a, hence the swapped operands below.
          if (gt_bias) {
            __ CmpUltS(FTMP, rhs, lhs);
          } else {
            __ CmpLtS(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGE:
          // a >= b  <=>  b <= a, hence the swapped operands below.
          if (gt_bias) {
            __ CmpUleS(FTMP, rhs, lhs);
          } else {
            __ CmpLeS(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        default:
          // Unsigned conditions (kCondB/BE/A/AE) never apply to FP compares.
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqS(0, lhs, rhs);
          __ Bc1t(0, label);
          break;
        case kCondNE:
          __ CeqS(0, lhs, rhs);
          __ Bc1f(0, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtS(0, lhs, rhs);
          } else {
            __ CultS(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeS(0, lhs, rhs);
          } else {
            __ CuleS(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultS(0, rhs, lhs);
          } else {
            __ ColtS(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleS(0, rhs, lhs);
          } else {
            __ ColeS(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    }
  } else {
    DCHECK_EQ(type, Primitive::kPrimDouble);
    // Same structure as the float paths above, with the double-precision
    // variants of the compare instructions.
    if (isR6) {
      switch (cond) {
        case kCondEQ:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Bc1nez(FTMP, label);
          break;
        case kCondNE:
          __ CmpEqD(FTMP, lhs, rhs);
          __ Bc1eqz(FTMP, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ CmpLtD(FTMP, lhs, rhs);
          } else {
            __ CmpUltD(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ CmpLeD(FTMP, lhs, rhs);
          } else {
            __ CmpUleD(FTMP, lhs, rhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CmpUltD(FTMP, rhs, lhs);
          } else {
            __ CmpLtD(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CmpUleD(FTMP, rhs, lhs);
          } else {
            __ CmpLeD(FTMP, rhs, lhs);
          }
          __ Bc1nez(FTMP, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    } else {
      switch (cond) {
        case kCondEQ:
          __ CeqD(0, lhs, rhs);
          __ Bc1t(0, label);
          break;
        case kCondNE:
          __ CeqD(0, lhs, rhs);
          __ Bc1f(0, label);
          break;
        case kCondLT:
          if (gt_bias) {
            __ ColtD(0, lhs, rhs);
          } else {
            __ CultD(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondLE:
          if (gt_bias) {
            __ ColeD(0, lhs, rhs);
          } else {
            __ CuleD(0, lhs, rhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGT:
          if (gt_bias) {
            __ CultD(0, rhs, lhs);
          } else {
            __ ColtD(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        case kCondGE:
          if (gt_bias) {
            __ CuleD(0, rhs, lhs);
          } else {
            __ ColeD(0, rhs, lhs);
          }
          __ Bc1t(0, label);
          break;
        default:
          LOG(FATAL) << "Unexpected non-floating-point condition";
          UNREACHABLE();
      }
    }
  }
}
5386
// Emits the branch(es) implementing a (possibly unmaterialized) boolean
// condition, which is input `condition_input_index` of `instruction`.
// A null `true_target`/`false_target` means that successor is the fall-through
// block and needs no explicit branch.
void InstructionCodeGeneratorMIPS::GenerateTestAndBranch(HInstruction* instruction,
                                                         size_t condition_input_index,
                                                         MipsLabel* true_target,
                                                         MipsLabel* false_target) {
  HInstruction* cond = instruction->InputAt(condition_input_index);

  if (true_target == nullptr && false_target == nullptr) {
    // Nothing to do. The code always falls through.
    return;
  } else if (cond->IsIntConstant()) {
    // Constant condition, statically compared against "true" (integer value 1).
    // Only an unconditional branch (or nothing) is needed.
    if (cond->AsIntConstant()->IsTrue()) {
      if (true_target != nullptr) {
        __ B(true_target);
      }
    } else {
      DCHECK(cond->AsIntConstant()->IsFalse()) << cond->AsIntConstant()->GetValue();
      if (false_target != nullptr) {
        __ B(false_target);
      }
    }
    return;
  }

  // The following code generates these patterns:
  //  (1) true_target == nullptr && false_target != nullptr
  //        - opposite condition true => branch to false_target
  //  (2) true_target != nullptr && false_target == nullptr
  //        - condition true => branch to true_target
  //  (3) true_target != nullptr && false_target != nullptr
  //        - condition true => branch to true_target
  //        - branch to false_target
  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition instruction has been materialized, compare the output to 0.
    Location cond_val = instruction->GetLocations()->InAt(condition_input_index);
    DCHECK(cond_val.IsRegister());
    if (true_target == nullptr) {
      __ Beqz(cond_val.AsRegister<Register>(), false_target);
    } else {
      __ Bnez(cond_val.AsRegister<Register>(), true_target);
    }
  } else {
    // The condition instruction has not been materialized, use its inputs as
    // the comparison and its condition as the branch condition.
    HCondition* condition = cond->AsCondition();
    Primitive::Type type = condition->InputAt(0)->GetType();
    LocationSummary* locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    MipsLabel* branch_target = true_target;

    if (true_target == nullptr) {
      // Case (1): branch to false_target on the opposite condition.
      if_cond = condition->GetOppositeCondition();
      branch_target = false_target;
    }

    switch (type) {
      default:
        GenerateIntCompareAndBranch(if_cond, locations, branch_target);
        break;
      case Primitive::kPrimLong:
        GenerateLongCompareAndBranch(if_cond, locations, branch_target);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        GenerateFpCompareAndBranch(if_cond, condition->IsGtBias(), type, locations, branch_target);
        break;
    }
  }

  // If neither branch falls through (case 3), the conditional branch to `true_target`
  // was already emitted (case 2) and we need to emit a jump to `false_target`.
  if (true_target != nullptr && false_target != nullptr) {
    __ B(false_target);
  }
}
5462
5463void LocationsBuilderMIPS::VisitIf(HIf* if_instr) {
5464 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
David Brazdil0debae72015-11-12 18:37:00 +00005465 if (IsBooleanValueOrMaterializedCondition(if_instr->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005466 locations->SetInAt(0, Location::RequiresRegister());
5467 }
5468}
5469
5470void InstructionCodeGeneratorMIPS::VisitIf(HIf* if_instr) {
David Brazdil0debae72015-11-12 18:37:00 +00005471 HBasicBlock* true_successor = if_instr->IfTrueSuccessor();
5472 HBasicBlock* false_successor = if_instr->IfFalseSuccessor();
5473 MipsLabel* true_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), true_successor) ?
5474 nullptr : codegen_->GetLabelOf(true_successor);
5475 MipsLabel* false_target = codegen_->GoesToNextBlock(if_instr->GetBlock(), false_successor) ?
5476 nullptr : codegen_->GetLabelOf(false_successor);
5477 GenerateTestAndBranch(if_instr, /* condition_input_index */ 0, true_target, false_target);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005478}
5479
5480void LocationsBuilderMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
5481 LocationSummary* locations = new (GetGraph()->GetArena())
5482 LocationSummary(deoptimize, LocationSummary::kCallOnSlowPath);
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01005483 InvokeRuntimeCallingConvention calling_convention;
5484 RegisterSet caller_saves = RegisterSet::Empty();
5485 caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
5486 locations->SetCustomSlowPathCallerSaves(caller_saves);
David Brazdil0debae72015-11-12 18:37:00 +00005487 if (IsBooleanValueOrMaterializedCondition(deoptimize->InputAt(0))) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005488 locations->SetInAt(0, Location::RequiresRegister());
5489 }
5490}
5491
5492void InstructionCodeGeneratorMIPS::VisitDeoptimize(HDeoptimize* deoptimize) {
Aart Bik42249c32016-01-07 15:33:50 -08005493 SlowPathCodeMIPS* slow_path =
5494 deopt_slow_paths_.NewSlowPath<DeoptimizationSlowPathMIPS>(deoptimize);
David Brazdil0debae72015-11-12 18:37:00 +00005495 GenerateTestAndBranch(deoptimize,
5496 /* condition_input_index */ 0,
5497 slow_path->GetEntryLabel(),
5498 /* false_target */ nullptr);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02005499}
5500
// This function returns true if a conditional move can be generated for HSelect.
// Otherwise it returns false and HSelect must be implemented in terms of conditional
// branches and regular moves.
//
// If `locations_to_set` isn't nullptr, its inputs and outputs are set for HSelect.
//
// While determining feasibility of a conditional move and setting inputs/outputs
// are two distinct tasks, this function does both because they share quite a bit
// of common logic.
static bool CanMoveConditionally(HSelect* select, bool is_r6, LocationSummary* locations_to_set) {
  bool materialized = IsBooleanValueOrMaterializedCondition(select->GetCondition());
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  HCondition* condition = cond->AsCondition();

  // A materialized condition is an int in a register, regardless of its origin.
  Primitive::Type cond_type = materialized ? Primitive::kPrimInt : condition->InputAt(0)->GetType();
  Primitive::Type dst_type = select->GetType();

  HConstant* cst_true_value = select->GetTrueValue()->AsConstant();
  HConstant* cst_false_value = select->GetFalseValue()->AsConstant();
  bool is_true_value_zero_constant =
      (cst_true_value != nullptr && cst_true_value->IsZeroBitPattern());
  bool is_false_value_zero_constant =
      (cst_false_value != nullptr && cst_false_value->IsZeroBitPattern());

  bool can_move_conditionally = false;
  bool use_const_for_false_in = false;
  bool use_const_for_true_in = false;

  // With a constant condition no conditional move is attempted;
  // `can_move_conditionally` stays false and the branch-based fallback is used.
  if (!cond->IsConstant()) {
    // The outer switch is on the condition type, the inner on the destination
    // type; the instruction sequences in the comments below are what
    // GenConditionalMoveR2/R6 will emit for each accepted combination.
    switch (cond_type) {
      default:
        switch (dst_type) {
          default:
            // Moving int on int condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // seleqz out_reg, false_reg, cond_reg
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez out_reg, true_reg, cond_reg
                can_move_conditionally = true;
                use_const_for_false_in = true;
              } else if (materialized) {
                // Not materializing unmaterialized int conditions
                // to keep the instruction count low.
                // selnez AT, true_reg, cond_reg
                // seleqz TMP, false_reg, cond_reg
                // or out_reg, AT, TMP
                can_move_conditionally = true;
              }
            } else {
              // movn out_reg, true_reg/ZERO, cond_reg
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimLong:
            // Moving long on int condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // seleqz out_reg_lo, false_reg_lo, cond_reg
                // seleqz out_reg_hi, false_reg_hi, cond_reg
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez out_reg_lo, true_reg_lo, cond_reg
                // selnez out_reg_hi, true_reg_hi, cond_reg
                can_move_conditionally = true;
                use_const_for_false_in = true;
              }
              // Other long conditional moves would generate 6+ instructions,
              // which is too many.
            } else {
              // movn out_reg_lo, true_reg_lo/ZERO, cond_reg
              // movn out_reg_hi, true_reg_hi/ZERO, cond_reg
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimFloat:
          case Primitive::kPrimDouble:
            // Moving float/double on int condition.
            if (is_r6) {
              if (materialized) {
                // Not materializing unmaterialized int conditions
                // to keep the instruction count low.
                can_move_conditionally = true;
                if (is_true_value_zero_constant) {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // seleqz.fmt out_reg, false_reg, temp_cond_reg
                  use_const_for_true_in = true;
                } else if (is_false_value_zero_constant) {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // selnez.fmt out_reg, true_reg, temp_cond_reg
                  use_const_for_false_in = true;
                } else {
                  // sltu TMP, ZERO, cond_reg
                  // mtc1 TMP, temp_cond_reg
                  // sel.fmt temp_cond_reg, false_reg, true_reg
                  // mov.fmt out_reg, temp_cond_reg
                }
              }
            } else {
              // movn.fmt out_reg, true_reg, cond_reg
              can_move_conditionally = true;
            }
            break;
        }
        break;
      case Primitive::kPrimLong:
        // We don't materialize long comparison now
        // and use conditional branches instead.
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        switch (dst_type) {
          default:
            // Moving int on float/double condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // seleqz out_reg, false_reg, TMP
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // selnez out_reg, true_reg, TMP
                can_move_conditionally = true;
                use_const_for_false_in = true;
              } else {
                // mfc1 TMP, temp_cond_reg
                // selnez AT, true_reg, TMP
                // seleqz TMP, false_reg, TMP
                // or out_reg, AT, TMP
                can_move_conditionally = true;
              }
            } else {
              // movt out_reg, true_reg/ZERO, cc
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimLong:
            // Moving long on float/double condition.
            if (is_r6) {
              if (is_true_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // seleqz out_reg_lo, false_reg_lo, TMP
                // seleqz out_reg_hi, false_reg_hi, TMP
                can_move_conditionally = true;
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // mfc1 TMP, temp_cond_reg
                // selnez out_reg_lo, true_reg_lo, TMP
                // selnez out_reg_hi, true_reg_hi, TMP
                can_move_conditionally = true;
                use_const_for_false_in = true;
              }
              // Other long conditional moves would generate 6+ instructions,
              // which is too many.
            } else {
              // movt out_reg_lo, true_reg_lo/ZERO, cc
              // movt out_reg_hi, true_reg_hi/ZERO, cc
              can_move_conditionally = true;
              use_const_for_true_in = is_true_value_zero_constant;
            }
            break;
          case Primitive::kPrimFloat:
          case Primitive::kPrimDouble:
            // Moving float/double on float/double condition.
            if (is_r6) {
              can_move_conditionally = true;
              if (is_true_value_zero_constant) {
                // seleqz.fmt out_reg, false_reg, temp_cond_reg
                use_const_for_true_in = true;
              } else if (is_false_value_zero_constant) {
                // selnez.fmt out_reg, true_reg, temp_cond_reg
                use_const_for_false_in = true;
              } else {
                // sel.fmt temp_cond_reg, false_reg, true_reg
                // mov.fmt out_reg, temp_cond_reg
              }
            } else {
              // movt.fmt out_reg, true_reg, cc
              can_move_conditionally = true;
            }
            break;
        }
        break;
    }
  }

  // At most one of the inputs may be replaced by a zero constant.
  if (can_move_conditionally) {
    DCHECK(!use_const_for_false_in || !use_const_for_true_in);
  } else {
    DCHECK(!use_const_for_false_in);
    DCHECK(!use_const_for_true_in);
  }

  if (locations_to_set != nullptr) {
    if (use_const_for_false_in) {
      locations_to_set->SetInAt(0, Location::ConstantLocation(cst_false_value));
    } else {
      locations_to_set->SetInAt(0,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (use_const_for_true_in) {
      locations_to_set->SetInAt(1, Location::ConstantLocation(cst_true_value));
    } else {
      locations_to_set->SetInAt(1,
                                Primitive::IsFloatingPointType(dst_type)
                                    ? Location::RequiresFpuRegister()
                                    : Location::RequiresRegister());
    }
    if (materialized) {
      locations_to_set->SetInAt(2, Location::RequiresRegister());
    }
    // On R6 we don't require the output to be the same as the
    // first input for conditional moves unlike on R2.
    bool is_out_same_as_first_in = !can_move_conditionally || !is_r6;
    if (is_out_same_as_first_in) {
      locations_to_set->SetOut(Location::SameAsFirstInput());
    } else {
      locations_to_set->SetOut(Primitive::IsFloatingPointType(dst_type)
                                   ? Location::RequiresFpuRegister()
                                   : Location::RequiresRegister());
    }
  }

  return can_move_conditionally;
}
5737
// Emits a conditional move for HSelect on pre-R6 MIPS, using MOVZ/MOVN on an
// integer condition (held in `cond_reg`) or MOVT/MOVF on a floating-point
// condition (held in FP condition-code flag `cond_cc`). The output is the same
// register (pair) as the false input (input 0); only the true input (input 1)
// is conditionally moved over it. A zero-constant true input is moved from the
// ZERO register.
void InstructionCodeGeneratorMIPS::GenConditionalMoveR2(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location src = locations->InAt(1);
  Register src_reg = ZERO;
  Register src_reg_high = ZERO;
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  Register cond_reg = TMP;
  int cond_cc = 0;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition already lives in a register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
  } else {
    // Materialize the comparison into cond_reg (int) or cond_cc (FP).
    // `cond_inverted` reports whether the materialized value is the negation
    // of the requested condition.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        DCHECK_NE(cond_type, Primitive::kPrimLong);
        cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompareR2(if_cond,
                                               condition->IsGtBias(),
                                               cond_type,
                                               cond_locations,
                                               cond_cc);
        break;
    }
  }

  // On R2 the output must alias the false input (input 0).
  DCHECK(dst.Equals(locations->InAt(0)));
  if (src.IsRegister()) {
    src_reg = src.AsRegister<Register>();
  } else if (src.IsRegisterPair()) {
    src_reg = src.AsRegisterPairLow<Register>();
    src_reg_high = src.AsRegisterPairHigh<Register>();
  } else if (src.IsConstant()) {
    // Zero constant: src_reg/src_reg_high stay ZERO.
    DCHECK(src.GetConstant()->IsZeroBitPattern());
  }

  switch (cond_type) {
    default:
      // Integer condition: MOVN moves on cond != 0, MOVZ on cond == 0
      // (used when the materialized condition is inverted).
      switch (dst_type) {
        default:
          if (cond_inverted) {
            __ Movz(dst.AsRegister<Register>(), src_reg, cond_reg);
          } else {
            __ Movn(dst.AsRegister<Register>(), src_reg, cond_reg);
          }
          break;
        case Primitive::kPrimLong:
          if (cond_inverted) {
            __ Movz(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
            __ Movz(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
          } else {
            __ Movn(dst.AsRegisterPairLow<Register>(), src_reg, cond_reg);
            __ Movn(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_reg);
          }
          break;
        case Primitive::kPrimFloat:
          if (cond_inverted) {
            __ MovzS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          } else {
            __ MovnS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          }
          break;
        case Primitive::kPrimDouble:
          if (cond_inverted) {
            __ MovzD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          } else {
            __ MovnD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_reg);
          }
          break;
      }
      break;
    case Primitive::kPrimLong:
      // Long conditions are never materialized (see CanMoveConditionally).
      LOG(FATAL) << "Unreachable";
      UNREACHABLE();
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // FP condition: MOVT moves on cc set, MOVF on cc clear.
      switch (dst_type) {
        default:
          if (cond_inverted) {
            __ Movf(dst.AsRegister<Register>(), src_reg, cond_cc);
          } else {
            __ Movt(dst.AsRegister<Register>(), src_reg, cond_cc);
          }
          break;
        case Primitive::kPrimLong:
          if (cond_inverted) {
            __ Movf(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
            __ Movf(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
          } else {
            __ Movt(dst.AsRegisterPairLow<Register>(), src_reg, cond_cc);
            __ Movt(dst.AsRegisterPairHigh<Register>(), src_reg_high, cond_cc);
          }
          break;
        case Primitive::kPrimFloat:
          if (cond_inverted) {
            __ MovfS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          } else {
            __ MovtS(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          }
          break;
        case Primitive::kPrimDouble:
          if (cond_inverted) {
            __ MovfD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          } else {
            __ MovtD(dst.AsFpuRegister<FRegister>(), src.AsFpuRegister<FRegister>(), cond_cc);
          }
          break;
      }
      break;
  }
}
5859
// Emits a conditional move for HSelect on MIPS R6, using SELEQZ/SELNEZ for
// integer destinations and SEL.fmt / SELEQZ.fmt / SELNEZ.fmt for FP
// destinations. Integer conditions live in `cond_reg`, FP conditions in
// `fcond_reg`, and are transferred across register files with MFC1/MTC1 as
// needed. A zero-constant input needs no register: SELEQZ/SELNEZ produce zero
// themselves when the condition deselects the register operand.
void InstructionCodeGeneratorMIPS::GenConditionalMoveR6(HSelect* select) {
  LocationSummary* locations = select->GetLocations();
  Location dst = locations->Out();
  Location false_src = locations->InAt(0);
  Location true_src = locations->InAt(1);
  HInstruction* cond = select->InputAt(/* condition_input_index */ 2);
  Register cond_reg = TMP;
  FRegister fcond_reg = FTMP;
  Primitive::Type cond_type = Primitive::kPrimInt;
  bool cond_inverted = false;
  Primitive::Type dst_type = select->GetType();

  if (IsBooleanValueOrMaterializedCondition(cond)) {
    // The condition already lives in a register.
    cond_reg = locations->InAt(/* condition_input_index */ 2).AsRegister<Register>();
  } else {
    // Materialize the comparison into cond_reg (int) or fcond_reg (FP).
    // `cond_inverted` reports whether the materialized value is the negation
    // of the requested condition.
    HCondition* condition = cond->AsCondition();
    LocationSummary* cond_locations = cond->GetLocations();
    IfCondition if_cond = condition->GetCondition();
    cond_type = condition->InputAt(0)->GetType();
    switch (cond_type) {
      default:
        DCHECK_NE(cond_type, Primitive::kPrimLong);
        cond_inverted = MaterializeIntCompare(if_cond, cond_locations, cond_reg);
        break;
      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        cond_inverted = MaterializeFpCompareR6(if_cond,
                                               condition->IsGtBias(),
                                               cond_type,
                                               cond_locations,
                                               fcond_reg);
        break;
    }
  }

  // Only zero constants may appear as inputs (enforced by CanMoveConditionally).
  if (true_src.IsConstant()) {
    DCHECK(true_src.GetConstant()->IsZeroBitPattern());
  }
  if (false_src.IsConstant()) {
    DCHECK(false_src.GetConstant()->IsZeroBitPattern());
  }

  switch (dst_type) {
    default:
      if (Primitive::IsFloatingPointType(cond_type)) {
        // Move the FP condition into a core register for SELEQZ/SELNEZ.
        __ Mfc1(cond_reg, fcond_reg);
      }
      if (true_src.IsConstant()) {
        if (cond_inverted) {
          __ Selnez(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Seleqz(dst.AsRegister<Register>(), false_src.AsRegister<Register>(), cond_reg);
        }
      } else if (false_src.IsConstant()) {
        if (cond_inverted) {
          __ Seleqz(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Selnez(dst.AsRegister<Register>(), true_src.AsRegister<Register>(), cond_reg);
        }
      } else {
        // General case: select each side into AT/TMP and combine with OR.
        DCHECK_NE(cond_reg, AT);
        if (cond_inverted) {
          __ Seleqz(AT, true_src.AsRegister<Register>(), cond_reg);
          __ Selnez(TMP, false_src.AsRegister<Register>(), cond_reg);
        } else {
          __ Selnez(AT, true_src.AsRegister<Register>(), cond_reg);
          __ Seleqz(TMP, false_src.AsRegister<Register>(), cond_reg);
        }
        __ Or(dst.AsRegister<Register>(), AT, TMP);
      }
      break;
    case Primitive::kPrimLong: {
      if (Primitive::IsFloatingPointType(cond_type)) {
        __ Mfc1(cond_reg, fcond_reg);
      }
      Register dst_lo = dst.AsRegisterPairLow<Register>();
      Register dst_hi = dst.AsRegisterPairHigh<Register>();
      // One of the inputs must be a zero constant (see CanMoveConditionally);
      // select the non-constant pair, letting the deselected case produce zero.
      if (true_src.IsConstant()) {
        Register src_lo = false_src.AsRegisterPairLow<Register>();
        Register src_hi = false_src.AsRegisterPairHigh<Register>();
        if (cond_inverted) {
          __ Selnez(dst_lo, src_lo, cond_reg);
          __ Selnez(dst_hi, src_hi, cond_reg);
        } else {
          __ Seleqz(dst_lo, src_lo, cond_reg);
          __ Seleqz(dst_hi, src_hi, cond_reg);
        }
      } else {
        DCHECK(false_src.IsConstant());
        Register src_lo = true_src.AsRegisterPairLow<Register>();
        Register src_hi = true_src.AsRegisterPairHigh<Register>();
        if (cond_inverted) {
          __ Seleqz(dst_lo, src_lo, cond_reg);
          __ Seleqz(dst_hi, src_hi, cond_reg);
        } else {
          __ Selnez(dst_lo, src_lo, cond_reg);
          __ Selnez(dst_hi, src_hi, cond_reg);
        }
      }
      break;
    }
    case Primitive::kPrimFloat: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FRegister dst_reg = dst.AsFpuRegister<FRegister>();
      if (true_src.IsConstant()) {
        FRegister src_reg = false_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FRegister src_reg = true_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SeleqzS(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezS(dst_reg, src_reg, fcond_reg);
        }
      } else {
        // SEL.S overwrites the condition register with the selected value,
        // so select into fcond_reg and move to the destination afterwards.
        if (cond_inverted) {
          __ SelS(fcond_reg,
                  true_src.AsFpuRegister<FRegister>(),
                  false_src.AsFpuRegister<FRegister>());
        } else {
          __ SelS(fcond_reg,
                  false_src.AsFpuRegister<FRegister>(),
                  true_src.AsFpuRegister<FRegister>());
        }
        __ MovS(dst_reg, fcond_reg);
      }
      break;
    }
    case Primitive::kPrimDouble: {
      if (!Primitive::IsFloatingPointType(cond_type)) {
        // sel*.fmt tests bit 0 of the condition register, account for that.
        __ Sltu(TMP, ZERO, cond_reg);
        __ Mtc1(TMP, fcond_reg);
      }
      FRegister dst_reg = dst.AsFpuRegister<FRegister>();
      if (true_src.IsConstant()) {
        FRegister src_reg = false_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        }
      } else if (false_src.IsConstant()) {
        FRegister src_reg = true_src.AsFpuRegister<FRegister>();
        if (cond_inverted) {
          __ SeleqzD(dst_reg, src_reg, fcond_reg);
        } else {
          __ SelnezD(dst_reg, src_reg, fcond_reg);
        }
      } else {
        if (cond_inverted) {
          __ SelD(fcond_reg,
                  true_src.AsFpuRegister<FRegister>(),
                  false_src.AsFpuRegister<FRegister>());
        } else {
          __ SelD(fcond_reg,
                  false_src.AsFpuRegister<FRegister>(),
                  true_src.AsFpuRegister<FRegister>());
        }
        __ MovD(dst_reg, fcond_reg);
      }
      break;
    }
  }
}
6033
Goran Jakovljevicc6418422016-12-05 16:31:55 +01006034void LocationsBuilderMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
6035 LocationSummary* locations = new (GetGraph()->GetArena())
6036 LocationSummary(flag, LocationSummary::kNoCall);
6037 locations->SetOut(Location::RequiresRegister());
Mingyao Yang063fc772016-08-02 11:02:54 -07006038}
6039
Goran Jakovljevicc6418422016-12-05 16:31:55 +01006040void InstructionCodeGeneratorMIPS::VisitShouldDeoptimizeFlag(HShouldDeoptimizeFlag* flag) {
6041 __ LoadFromOffset(kLoadWord,
6042 flag->GetLocations()->Out().AsRegister<Register>(),
6043 SP,
6044 codegen_->GetStackOffsetOfShouldDeoptimizeFlag());
Mingyao Yang063fc772016-08-02 11:02:54 -07006045}
6046
David Brazdil74eb1b22015-12-14 11:44:01 +00006047void LocationsBuilderMIPS::VisitSelect(HSelect* select) {
6048 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(select);
Alexey Frunze674b9ee2016-09-20 14:54:15 -07006049 CanMoveConditionally(select, codegen_->GetInstructionSetFeatures().IsR6(), locations);
David Brazdil74eb1b22015-12-14 11:44:01 +00006050}
6051
6052void InstructionCodeGeneratorMIPS::VisitSelect(HSelect* select) {
Alexey Frunze674b9ee2016-09-20 14:54:15 -07006053 bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
6054 if (CanMoveConditionally(select, is_r6, /* locations_to_set */ nullptr)) {
6055 if (is_r6) {
6056 GenConditionalMoveR6(select);
6057 } else {
6058 GenConditionalMoveR2(select);
6059 }
6060 } else {
6061 LocationSummary* locations = select->GetLocations();
6062 MipsLabel false_target;
6063 GenerateTestAndBranch(select,
6064 /* condition_input_index */ 2,
6065 /* true_target */ nullptr,
6066 &false_target);
6067 codegen_->MoveLocation(locations->Out(), locations->InAt(1), select->GetType());
6068 __ Bind(&false_target);
6069 }
David Brazdil74eb1b22015-12-14 11:44:01 +00006070}
6071
void LocationsBuilderMIPS::VisitNativeDebugInfo(HNativeDebugInfo* info) {
  // No inputs, outputs, or temps; the instruction only needs a LocationSummary.
  new (GetGraph()->GetArena()) LocationSummary(info);
}
6075
// Intentionally emits no code.
void InstructionCodeGeneratorMIPS::VisitNativeDebugInfo(HNativeDebugInfo*) {
  // MaybeRecordNativeDebugInfo is already called implicitly in CodeGenerator::Compile.
}
6079
// Emits a single MIPS nop instruction.
void CodeGeneratorMIPS::GenerateNop() {
  __ Nop();
}
6083
// Sets up register locations for an instance/static field get.
// Volatile 64-bit (long/double) reads go through a runtime call
// (kCallOnMainOnly); object reads with read barriers enabled may take a
// slow path; everything else is a plain load.
void LocationsBuilderMIPS::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
  bool generate_volatile = field_info.IsVolatile() && is_wide;
  bool object_field_get_with_read_barrier =
      kEmitCompilerReadBarrier && (field_type == Primitive::kPrimNot);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      generate_volatile
          ? LocationSummary::kCallOnMainOnly
          : (object_field_get_with_read_barrier
              ? LocationSummary::kCallOnSlowPath
              : LocationSummary::kNoCall));

  if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  // Input 0 is the object (or class for a static field) holding the field.
  locations->SetInAt(0, Location::RequiresRegister());
  if (generate_volatile) {
    InvokeRuntimeCallingConvention calling_convention;
    // need A0 to hold base + offset
    locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    if (field_type == Primitive::kPrimLong) {
      locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimLong));
    } else {
      // Use Location::Any() to prevent situations when running out of available fp registers.
      locations->SetOut(Location::Any());
      // Need some temp core regs since FP results are returned in core registers
      Location reg = calling_convention.GetReturnLocation(Primitive::kPrimLong);
      locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairLow<Register>()));
      locations->AddTemp(Location::RegisterLocation(reg.AsRegisterPairHigh<Register>()));
    }
  } else {
    if (Primitive::IsFloatingPointType(instruction->GetType())) {
      locations->SetOut(Location::RequiresFpuRegister());
    } else {
      // The output overlaps in the case of an object field get with
      // read barriers enabled: we do not want the move to overwrite the
      // object's location, as we need it to emit the read barrier.
      locations->SetOut(Location::RequiresRegister(),
                        object_field_get_with_read_barrier
                            ? Location::kOutputOverlap
                            : Location::kNoOutputOverlap);
    }
    if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
      // We need a temporary register for the read barrier marking slow
      // path in CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier.
      // When Baker read barrier thunks are used for fields, no temp is needed.
      if (!kBakerReadBarrierThunksEnableForFields) {
        locations->AddTemp(Location::RequiresRegister());
      }
    }
  }
}
6137
// Emits code for an instance/static field get.
// Wide (64-bit) volatile loads are done via the kQuickA64Load runtime call;
// object (reference) loads may go through Baker or slow-path read barriers.
void InstructionCodeGeneratorMIPS::HandleFieldGet(HInstruction* instruction,
                                                  const FieldInfo& field_info,
                                                  uint32_t dex_pc) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Location dst_loc = locations->Out();
  LoadOperandType load_type = kLoadUnsignedByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the load width/signedness matching the Java field type.
  switch (type) {
    case Primitive::kPrimBoolean:
      load_type = kLoadUnsignedByte;
      break;
    case Primitive::kPrimByte:
      load_type = kLoadSignedByte;
      break;
    case Primitive::kPrimShort:
      load_type = kLoadSignedHalfword;
      break;
    case Primitive::kPrimChar:
      load_type = kLoadUnsignedHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      load_type = kLoadWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      load_type = kLoadDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (is_volatile && load_type == kLoadDoubleword) {
    // 64-bit volatile load: delegate to the A64Load runtime entrypoint.
    InvokeRuntimeCallingConvention calling_convention;
    __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
    // Do implicit Null check
    __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
    codegen_->InvokeRuntime(kQuickA64Load, instruction, dex_pc);
    CheckEntrypointTypes<kQuickA64Load, int64_t, volatile const int64_t*>();
    if (type == Primitive::kPrimDouble) {
      // FP results are returned in core registers. Need to move them.
      if (dst_loc.IsFpuRegister()) {
        __ Mtc1(locations->GetTemp(1).AsRegister<Register>(), dst_loc.AsFpuRegister<FRegister>());
        __ MoveToFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
                         dst_loc.AsFpuRegister<FRegister>());
      } else {
        DCHECK(dst_loc.IsDoubleStackSlot());
        __ StoreToOffset(kStoreWord,
                         locations->GetTemp(1).AsRegister<Register>(),
                         SP,
                         dst_loc.GetStackIndex());
        __ StoreToOffset(kStoreWord,
                         locations->GetTemp(2).AsRegister<Register>(),
                         SP,
                         dst_loc.GetStackIndex() + 4);
      }
    }
  } else {
    if (type == Primitive::kPrimNot) {
      // /* HeapReference<Object> */ dst = *(obj + offset)
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        Location temp_loc =
            kBakerReadBarrierThunksEnableForFields ? Location::NoLocation() : locations->GetTemp(0);
        // Note that a potential implicit null check is handled in this
        // CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier call.
        codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                        dst_loc,
                                                        obj,
                                                        offset,
                                                        temp_loc,
                                                        /* needs_null_check */ true);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
      } else {
        __ LoadFromOffset(kLoadWord, dst_loc.AsRegister<Register>(), obj, offset, null_checker);
        if (is_volatile) {
          GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
        }
        // If read barriers are enabled, emit read barriers other than
        // Baker's using a slow path (and also unpoison the loaded
        // reference, if heap poisoning is enabled).
        codegen_->MaybeGenerateReadBarrierSlow(instruction, dst_loc, dst_loc, obj_loc, offset);
      }
    } else if (!Primitive::IsFloatingPointType(type)) {
      Register dst;
      if (type == Primitive::kPrimLong) {
        // 64-bit integers live in a core register pair; load through the low half.
        DCHECK(dst_loc.IsRegisterPair());
        dst = dst_loc.AsRegisterPairLow<Register>();
      } else {
        DCHECK(dst_loc.IsRegister());
        dst = dst_loc.AsRegister<Register>();
      }
      __ LoadFromOffset(load_type, dst, obj, offset, null_checker);
    } else {
      DCHECK(dst_loc.IsFpuRegister());
      FRegister dst = dst_loc.AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ LoadSFromOffset(dst, obj, offset, null_checker);
      } else {
        __ LoadDFromOffset(dst, obj, offset, null_checker);
      }
    }
  }

  // Memory barriers, in the case of references, are handled in the
  // previous switch statement.
  if (is_volatile && (type != Primitive::kPrimNot)) {
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}
6258
// Allocates register locations for an instance/static field set.
// Wide (64-bit) volatile stores go through the kQuickA64Store runtime
// entrypoint, so their inputs are forced into the runtime calling convention.
void LocationsBuilderMIPS::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = (field_type == Primitive::kPrimLong) || (field_type == Primitive::kPrimDouble);
  // MIPS32 has no 64-bit atomic store; wide volatile accesses call the runtime.
  bool generate_volatile = field_info.IsVolatile() && is_wide;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, generate_volatile ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall);

  locations->SetInAt(0, Location::RequiresRegister());
  if (generate_volatile) {
    InvokeRuntimeCallingConvention calling_convention;
    // need A0 to hold base + offset
    locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    if (field_type == Primitive::kPrimLong) {
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
    } else {
      // Use Location::Any() to prevent situations when running out of available fp registers.
      locations->SetInAt(1, Location::Any());
      // Pass FP parameters in core registers.
      locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
      locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
    }
  } else {
    if (Primitive::IsFloatingPointType(field_type)) {
      locations->SetInAt(1, FpuRegisterOrConstantForStore(instruction->InputAt(1)));
    } else {
      locations->SetInAt(1, RegisterOrZeroConstant(instruction->InputAt(1)));
    }
  }
}
6289
// Emits code for an instance/static field set, including volatile memory
// barriers, heap-reference poisoning, and the GC card mark for reference
// stores. Wide (64-bit) volatile stores call the kQuickA64Store entrypoint.
void InstructionCodeGeneratorMIPS::HandleFieldSet(HInstruction* instruction,
                                                  const FieldInfo& field_info,
                                                  uint32_t dex_pc,
                                                  bool value_can_be_null) {
  Primitive::Type type = field_info.GetFieldType();
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location value_location = locations->InAt(1);
  StoreOperandType store_type = kStoreByte;
  bool is_volatile = field_info.IsVolatile();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();
  bool needs_write_barrier = CodeGenerator::StoreNeedsWriteBarrier(type, instruction->InputAt(1));
  auto null_checker = GetImplicitNullChecker(instruction, codegen_);

  // Select the store width matching the Java field type.
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
      store_type = kStoreByte;
      break;
    case Primitive::kPrimShort:
    case Primitive::kPrimChar:
      store_type = kStoreHalfword;
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot:
      store_type = kStoreWord;
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      store_type = kStoreDoubleword;
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
      UNREACHABLE();
  }

  if (is_volatile) {
    // Release ordering before a volatile store.
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  if (is_volatile && store_type == kStoreDoubleword) {
    // 64-bit volatile store: delegate to the A64Store runtime entrypoint.
    InvokeRuntimeCallingConvention calling_convention;
    __ Addiu32(locations->GetTemp(0).AsRegister<Register>(), obj, offset);
    // Do implicit Null check.
    __ Lw(ZERO, locations->GetTemp(0).AsRegister<Register>(), 0);
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
    if (type == Primitive::kPrimDouble) {
      // Pass FP parameters in core registers.
      if (value_location.IsFpuRegister()) {
        __ Mfc1(locations->GetTemp(1).AsRegister<Register>(),
                value_location.AsFpuRegister<FRegister>());
        __ MoveFromFpuHigh(locations->GetTemp(2).AsRegister<Register>(),
                           value_location.AsFpuRegister<FRegister>());
      } else if (value_location.IsDoubleStackSlot()) {
        __ LoadFromOffset(kLoadWord,
                          locations->GetTemp(1).AsRegister<Register>(),
                          SP,
                          value_location.GetStackIndex());
        __ LoadFromOffset(kLoadWord,
                          locations->GetTemp(2).AsRegister<Register>(),
                          SP,
                          value_location.GetStackIndex() + 4);
      } else {
        DCHECK(value_location.IsConstant());
        DCHECK(value_location.GetConstant()->IsDoubleConstant());
        int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
        __ LoadConst64(locations->GetTemp(2).AsRegister<Register>(),
                       locations->GetTemp(1).AsRegister<Register>(),
                       value);
      }
    }
    codegen_->InvokeRuntime(kQuickA64Store, instruction, dex_pc);
    CheckEntrypointTypes<kQuickA64Store, void, volatile int64_t *, int64_t>();
  } else {
    if (value_location.IsConstant()) {
      int64_t value = CodeGenerator::GetInt64ValueOf(value_location.GetConstant());
      __ StoreConstToOffset(store_type, value, obj, offset, TMP, null_checker);
    } else if (!Primitive::IsFloatingPointType(type)) {
      Register src;
      if (type == Primitive::kPrimLong) {
        src = value_location.AsRegisterPairLow<Register>();
      } else {
        src = value_location.AsRegister<Register>();
      }
      if (kPoisonHeapReferences && needs_write_barrier) {
        // Note that in the case where `value` is a null reference,
        // we do not enter this block, as a null reference does not
        // need poisoning.
        DCHECK_EQ(type, Primitive::kPrimNot);
        __ PoisonHeapReference(TMP, src);
        __ StoreToOffset(store_type, TMP, obj, offset, null_checker);
      } else {
        __ StoreToOffset(store_type, src, obj, offset, null_checker);
      }
    } else {
      FRegister src = value_location.AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ StoreSToOffset(src, obj, offset, null_checker);
      } else {
        __ StoreDToOffset(src, obj, offset, null_checker);
      }
    }
  }

  if (needs_write_barrier) {
    // Reference store: mark the GC card for the object so the concurrent
    // collector rescans it.
    Register src = value_location.AsRegister<Register>();
    codegen_->MarkGCCard(obj, src, value_can_be_null);
  }

  if (is_volatile) {
    // Full barrier after a volatile store.
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}
6404
// Delegates instance-field-get location setup to the common field-get handler.
void LocationsBuilderMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
6408
// Delegates instance-field-get code emission to the common field-get handler.
void InstructionCodeGeneratorMIPS::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
}
6412
// Delegates instance-field-set location setup to the common field-set handler.
void LocationsBuilderMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
6416
// Delegates instance-field-set code emission to the common field-set handler.
void InstructionCodeGeneratorMIPS::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction,
                 instruction->GetFieldInfo(),
                 instruction->GetDexPc(),
                 instruction->GetValueCanBeNull());
}
6423
// Loads a heap reference `out = *(out + offset)`, reusing `out` as the base
// register, with the read-barrier strategy selected by `read_barrier_option`.
// `maybe_temp` is only required for the non-thunk Baker path and the slow path
// (where it preserves the original base for the barrier).
void InstructionCodeGeneratorMIPS::GenerateReferenceLoadOneRegister(
    HInstruction* instruction,
    Location out,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  Register out_reg = out.AsRegister<Register>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (!kUseBakerReadBarrier || !kBakerReadBarrierThunksEnableForFields) {
      DCHECK(maybe_temp.IsRegister()) << maybe_temp;
    }
    if (kUseBakerReadBarrier) {
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(out + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      out_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // Save the value of `out` into `maybe_temp` before overwriting it
      // in the following move operation, as we will need it for the
      // read barrier below.
      __ Move(maybe_temp.AsRegister<Register>(), out_reg);
      // /* HeapReference<Object> */ out = *(out + offset)
      __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, maybe_temp, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(out + offset)
    __ LoadFromOffset(kLoadWord, out_reg, out_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6462
// Loads a heap reference `out = *(obj + offset)` where base and destination
// are distinct registers, with the read-barrier strategy selected by
// `read_barrier_option`. `maybe_temp` is only required for the non-thunk
// Baker path.
void InstructionCodeGeneratorMIPS::GenerateReferenceLoadTwoRegisters(
    HInstruction* instruction,
    Location out,
    Location obj,
    uint32_t offset,
    Location maybe_temp,
    ReadBarrierOption read_barrier_option) {
  Register out_reg = out.AsRegister<Register>();
  Register obj_reg = obj.AsRegister<Register>();
  if (read_barrier_option == kWithReadBarrier) {
    CHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      if (!kBakerReadBarrierThunksEnableForFields) {
        DCHECK(maybe_temp.IsRegister()) << maybe_temp;
      }
      // Load with fast path based Baker's read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      codegen_->GenerateFieldLoadWithBakerReadBarrier(instruction,
                                                      out,
                                                      obj_reg,
                                                      offset,
                                                      maybe_temp,
                                                      /* needs_null_check */ false);
    } else {
      // Load with slow path based read barrier.
      // /* HeapReference<Object> */ out = *(obj + offset)
      __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
      codegen_->GenerateReadBarrierSlow(instruction, out, out, obj, offset);
    }
  } else {
    // Plain load with no read barrier.
    // /* HeapReference<Object> */ out = *(obj + offset)
    __ LoadFromOffset(kLoadWord, out_reg, obj_reg, offset);
    __ MaybeUnpoisonHeapReference(out_reg);
  }
}
6499
Alexey Frunze4147fcc2017-06-17 19:57:27 -07006500static inline int GetBakerMarkThunkNumber(Register reg) {
6501 static_assert(BAKER_MARK_INTROSPECTION_REGISTER_COUNT == 21, "Expecting equal");
6502 if (reg >= V0 && reg <= T7) { // 14 consequtive regs.
6503 return reg - V0;
6504 } else if (reg >= S2 && reg <= S7) { // 6 consequtive regs.
6505 return 14 + (reg - S2);
6506 } else if (reg == FP) { // One more.
6507 return 20;
6508 }
6509 LOG(FATAL) << "Unexpected register " << reg;
6510 UNREACHABLE();
6511}
6512
6513static inline int GetBakerMarkFieldArrayThunkDisplacement(Register reg, bool short_offset) {
6514 int num = GetBakerMarkThunkNumber(reg) +
6515 (short_offset ? BAKER_MARK_INTROSPECTION_REGISTER_COUNT : 0);
6516 return num * BAKER_MARK_INTROSPECTION_FIELD_ARRAY_ENTRY_SIZE;
6517}
6518
6519static inline int GetBakerMarkGcRootThunkDisplacement(Register reg) {
6520 return GetBakerMarkThunkNumber(reg) * BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRY_SIZE +
6521 BAKER_MARK_INTROSPECTION_GC_ROOT_ENTRIES_OFFSET;
6522}
6523
// Loads a GC root `root = *(obj + offset)` with the requested read-barrier
// strategy. `label_low` (when non-null) marks the low-half instruction of a
// PC-relative pair for later patching; instruction reordering is disabled
// around it so the patched instruction stays in place.
// NOTE(review): the 0x5678 value appears to be a placeholder patched later by
// the PC-relative fixup machinery — confirm against the patching code.
void InstructionCodeGeneratorMIPS::GenerateGcRootFieldLoad(HInstruction* instruction,
                                                           Location root,
                                                           Register obj,
                                                           uint32_t offset,
                                                           ReadBarrierOption read_barrier_option,
                                                           MipsLabel* label_low) {
  // Only assigned on paths that also read it back (always guarded the same way).
  bool reordering;
  if (label_low != nullptr) {
    DCHECK_EQ(offset, 0x5678u);
  }
  Register root_reg = root.AsRegister<Register>();
  if (read_barrier_option == kWithReadBarrier) {
    DCHECK(kEmitCompilerReadBarrier);
    if (kUseBakerReadBarrier) {
      // Fast path implementation of art::ReadBarrier::BarrierForRoot when
      // Baker's read barrier are used:
      if (kBakerReadBarrierThunksEnableForGcRoots) {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark introspection entrypoint.
        // If `temp` is null, it means that `GetIsGcMarking()` is false, and
        // vice versa.
        //
        // We use thunks for the slow path. That thunk checks the reference
        // and jumps to the entrypoint if needed.
        //
        //     temp = Thread::Current()->pReadBarrierMarkReg00
        //     // AKA &art_quick_read_barrier_mark_introspection.
        //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        //     if (temp != nullptr) {
        //        temp = &gc_root_thunk<root_reg>
        //        root = temp(root)
        //     }

        bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(0);
        const int thunk_disp = GetBakerMarkGcRootThunkDisplacement(root_reg);
        int16_t offset_low = Low16Bits(offset);
        int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign
                                                                // extension in lw.
        bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
        Register base = short_offset ? obj : TMP;
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
        // Reordering must stay off so the root load lands in the delay slot.
        reordering = __ SetReorder(false);
        if (!short_offset) {
          DCHECK(!label_low);
          __ AddUpper(base, obj, offset_high);
        }
        MipsLabel skip_call;
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        if (label_low != nullptr) {
          DCHECK(short_offset);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadWord, root_reg, base, offset_low);  // Single instruction
                                                                   // in delay slot.
        if (isR6) {
          __ Jialc(T9, thunk_disp);
        } else {
          __ Addiu(T9, T9, thunk_disp);
          __ Jalr(T9);
          __ Nop();
        }
        __ Bind(&skip_call);
        __ SetReorder(reordering);
      } else {
        // Note that we do not actually check the value of `GetIsGcMarking()`
        // to decide whether to mark the loaded GC root or not. Instead, we
        // load into `temp` (T9) the read barrier mark entry point corresponding
        // to register `root`. If `temp` is null, it means that `GetIsGcMarking()`
        // is false, and vice versa.
        //
        //     GcRoot<mirror::Object> root = *(obj+offset);  // Original reference load.
        //     temp = Thread::Current()->pReadBarrierMarkReg ## root.reg()
        //     if (temp != null) {
        //       root = temp(root)
        //     }

        if (label_low != nullptr) {
          reordering = __ SetReorder(false);
          __ Bind(label_low);
        }
        // /* GcRoot<mirror::Object> */ root = *(obj + offset)
        __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
        if (label_low != nullptr) {
          __ SetReorder(reordering);
        }
        static_assert(
            sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>),
            "art::mirror::CompressedReference<mirror::Object> and art::GcRoot<mirror::Object> "
            "have different sizes.");
        static_assert(sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(int32_t),
                      "art::mirror::CompressedReference<mirror::Object> and int32_t "
                      "have different sizes.");

        // Slow path marking the GC root `root`.
        Location temp = Location::RegisterLocation(T9);
        SlowPathCodeMIPS* slow_path =
            new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(
                instruction,
                root,
                /*entrypoint*/ temp);
        codegen_->AddSlowPath(slow_path);

        const int32_t entry_point_offset =
            Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(root.reg() - 1);
        // Loading the entrypoint does not require a load acquire since it is only changed when
        // threads are suspended or running a checkpoint.
        __ LoadFromOffset(kLoadWord, temp.AsRegister<Register>(), TR, entry_point_offset);
        __ Bnez(temp.AsRegister<Register>(), slow_path->GetEntryLabel());
        __ Bind(slow_path->GetExitLabel());
      }
    } else {
      if (label_low != nullptr) {
        reordering = __ SetReorder(false);
        __ Bind(label_low);
      }
      // GC root loaded through a slow path for read barriers other
      // than Baker's.
      // /* GcRoot<mirror::Object>* */ root = obj + offset
      __ Addiu32(root_reg, obj, offset);
      if (label_low != nullptr) {
        __ SetReorder(reordering);
      }
      // /* mirror::Object* */ root = root->Read()
      codegen_->GenerateReadBarrierForRootSlow(instruction, root, root);
    }
  } else {
    if (label_low != nullptr) {
      reordering = __ SetReorder(false);
      __ Bind(label_low);
    }
    // Plain GC root load with no read barrier.
    // /* GcRoot<mirror::Object> */ root = *(obj + offset)
    __ LoadFromOffset(kLoadWord, root_reg, obj, offset);
    // Note that GC roots are not affected by heap poisoning, thus we
    // do not have to unpoison `root_reg` here.
    if (label_low != nullptr) {
      __ SetReorder(reordering);
    }
  }
}
6670
// Emits a heap-reference field load `ref = *(obj + offset)` guarded by a
// Baker read barrier. With thunks enabled, the barrier is a bare conditional
// call into a per-register introspection thunk placed so that the original
// load sits at a fixed return address; otherwise it falls back to the generic
// reference-load-with-read-barrier path.
void CodeGeneratorMIPS::GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
                                                              Location ref,
                                                              Register obj,
                                                              uint32_t offset,
                                                              Location temp,
                                                              bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  if (kBakerReadBarrierThunksEnableForFields) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not. Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // If the offset is too large to fit into the lw instruction, we
    //     // use an adjusted base register (TMP) here. This register
    //     // receives bits 16 ... 31 of the offset before the thunk invocation
    //     // and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = *(obj+offset);  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    bool isR6 = GetInstructionSetFeatures().IsR6();
    int16_t offset_low = Low16Bits(offset);
    int16_t offset_high = High16Bits(offset - offset_low);  // Accounts for sign extension in lw.
    bool short_offset = IsInt<16>(static_cast<int32_t>(offset));
    // Reordering must stay off: the thunk relies on the exact placement of the
    // load relative to the call's return address.
    bool reordering = __ SetReorder(false);
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(0);
    // There may have or may have not been a null check if the field offset is smaller than
    // the page size.
    // There must've been a null check in case it's actually a load from an array.
    // We will, however, perform an explicit null check in the thunk as it's easier to
    // do it than not.
    if (instruction->IsArrayGet()) {
      DCHECK(!needs_null_check);
    }
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, short_offset);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
    Register ref_reg = ref.AsRegister<Register>();
    Register base = short_offset ? obj : TMP;
    MipsLabel skip_call;
    if (short_offset) {
      if (isR6) {
        __ Beqzc(T9, &skip_call, /* is_bare */ true);
        __ Nop();  // In forbidden slot.
        __ Jialc(T9, thunk_disp);
      } else {
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        __ Addiu(T9, T9, thunk_disp);  // In delay slot.
        __ Jalr(T9);
        __ Nop();  // In delay slot.
      }
      __ Bind(&skip_call);
    } else {
      if (isR6) {
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        __ Aui(base, obj, offset_high);  // In delay slot.
        __ Jialc(T9, thunk_disp);
        __ Bind(&skip_call);
      } else {
        __ Lui(base, offset_high);
        __ Beqz(T9, &skip_call, /* is_bare */ true);
        __ Addiu(T9, T9, thunk_disp);  // In delay slot.
        __ Jalr(T9);
        __ Bind(&skip_call);
        __ Addu(base, base, obj);  // In delay slot.
      }
    }
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadWord, ref_reg, base, offset_low);  // Single instruction.
    if (needs_null_check) {
      MaybeRecordImplicitNullCheck(instruction);
    }
    __ MaybeUnpoisonHeapReference(ref_reg);
    __ SetReorder(reordering);
    return;
  }

  // Non-thunk path: treat the field load as an indexless reference load.
  // /* HeapReference<Object> */ ref = *(obj + offset)
  Location no_index = Location::NoLocation();
  ScaleFactor no_scale_factor = TIMES_1;
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            offset,
                                            no_index,
                                            no_scale_factor,
                                            temp,
                                            needs_null_check);
}
6778
// Generates an array element reference load guarded by a Baker read barrier.
// `ref` receives the loaded reference, `obj` is the array register,
// `data_offset` is the offset of the first element, and `index` holds the
// element index (a plain register, or a register pair of which only the low
// half carries data). When kBakerReadBarrierThunksEnableForArrays is set, the
// gray-check and marking are delegated to an out-of-line thunk reached via the
// entrypoint in T9; otherwise the generic inline fast/slow-path sequence in
// GenerateReferenceLoadWithBakerReadBarrier() is emitted.
void CodeGeneratorMIPS::GenerateArrayLoadWithBakerReadBarrier(HInstruction* instruction,
                                                              Location ref,
                                                              Register obj,
                                                              uint32_t data_offset,
                                                              Location index,
                                                              Location temp,
                                                              bool needs_null_check) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  static_assert(
      sizeof(mirror::HeapReference<mirror::Object>) == sizeof(int32_t),
      "art::mirror::HeapReference<art::mirror::Object> and int32_t have different sizes.");
  ScaleFactor scale_factor = TIMES_4;  // References are 4 bytes wide (see static_assert above).

  if (kBakerReadBarrierThunksEnableForArrays) {
    // Note that we do not actually check the value of `GetIsGcMarking()`
    // to decide whether to mark the loaded reference or not.  Instead, we
    // load into `temp` (T9) the read barrier mark introspection entrypoint.
    // If `temp` is null, it means that `GetIsGcMarking()` is false, and
    // vice versa.
    //
    // We use thunks for the slow path. That thunk checks the reference
    // and jumps to the entrypoint if needed. If the holder is not gray,
    // it issues a load-load memory barrier and returns to the original
    // reference load.
    //
    //     temp = Thread::Current()->pReadBarrierMarkReg00
    //     // AKA &art_quick_read_barrier_mark_introspection.
    //     if (temp != nullptr) {
    //        temp = &field_array_thunk<holder_reg>
    //        temp()
    //     }
    //   not_gray_return_address:
    //     // The element address is pre-calculated in the TMP register before the
    //     // thunk invocation and the thunk benefits from it.
    //     HeapReference<mirror::Object> reference = data[index];  // Original reference load.
    //   gray_return_address:

    DCHECK(temp.IsInvalid());
    DCHECK(index.IsValid());
    bool reordering = __ SetReorder(false);  // Branch/delay-slot layout below must not be altered.
    const int32_t entry_point_offset =
        Thread::ReadBarrierMarkEntryPointsOffset<kMipsPointerSize>(0);
    // We will not do the explicit null check in the thunk as some form of a null check
    // must've been done earlier.
    DCHECK(!needs_null_check);
    const int thunk_disp = GetBakerMarkFieldArrayThunkDisplacement(obj, /* short_offset */ false);
    // Loading the entrypoint does not require a load acquire since it is only changed when
    // threads are suspended or running a checkpoint.
    __ LoadFromOffset(kLoadWord, T9, TR, entry_point_offset);
    Register ref_reg = ref.AsRegister<Register>();
    Register index_reg = index.IsRegisterPair()
        ? index.AsRegisterPairLow<Register>()  // Only the low half of a pair carries the index.
        : index.AsRegister<Register>();
    MipsLabel skip_call;  // Taken when the entrypoint (T9) is null, i.e. GC is not marking.
    if (GetInstructionSetFeatures().IsR6()) {
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Lsa(TMP, index_reg, obj, scale_factor);  // In delay slot.
      __ Jialc(T9, thunk_disp);
      __ Bind(&skip_call);
    } else {
      // Pre-R6: no LSA, so shift and add are separate; the add lands in the JALR delay slot.
      __ Sll(TMP, index_reg, scale_factor);
      __ Beqz(T9, &skip_call, /* is_bare */ true);
      __ Addiu(T9, T9, thunk_disp);  // In delay slot.
      __ Jalr(T9);
      __ Bind(&skip_call);
      __ Addu(TMP, TMP, obj);  // In delay slot.
    }
    // /* HeapReference<Object> */ ref = *(obj + data_offset + (index << scale_factor))
    DCHECK(IsInt<16>(static_cast<int32_t>(data_offset))) << data_offset;
    __ LoadFromOffset(kLoadWord, ref_reg, TMP, data_offset);  // Single instruction.
    __ MaybeUnpoisonHeapReference(ref_reg);
    __ SetReorder(reordering);
    return;
  }

  // Thunks disabled: fall back to the generic inline Baker read barrier sequence.
  // /* HeapReference<Object> */ ref =
  //     *(obj + data_offset + index * sizeof(HeapReference<Object>))
  GenerateReferenceLoadWithBakerReadBarrier(instruction,
                                            ref,
                                            obj,
                                            data_offset,
                                            index,
                                            scale_factor,
                                            temp,
                                            needs_null_check);
}
6867
// Generates the inline (non-thunk) Baker read barrier sequence for a
// reference load at `obj + offset (+ index << scale_factor)`.
// The object's lock word is loaded first, then the reference, separated by a
// SYNC to enforce the required load-load ordering; if the lock word's read
// barrier state bit indicates gray, a slow path marks the loaded reference.
// When `always_update_field` is true, the slow path also updates the holder's
// field (used by the UnsafeCASObject-style intrinsics); in that mode `offset`
// must be 0, `scale_factor` TIMES_1, and `index` holds the field offset.
void CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier(HInstruction* instruction,
                                                                  Location ref,
                                                                  Register obj,
                                                                  uint32_t offset,
                                                                  Location index,
                                                                  ScaleFactor scale_factor,
                                                                  Location temp,
                                                                  bool needs_null_check,
                                                                  bool always_update_field) {
  DCHECK(kEmitCompilerReadBarrier);
  DCHECK(kUseBakerReadBarrier);

  // In slow path based read barriers, the read barrier call is
  // inserted after the original load. However, in fast path based
  // Baker's read barriers, we need to perform the load of
  // mirror::Object::monitor_ *before* the original reference load.
  // This load-load ordering is required by the read barrier.
  // The fast path/slow path (for Baker's algorithm) should look like:
  //
  //   uint32_t rb_state = Lockword(obj->monitor_).ReadBarrierState();
  //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
  //   HeapReference<Object> ref = *src;  // Original reference load.
  //   bool is_gray = (rb_state == ReadBarrier::GrayState());
  //   if (is_gray) {
  //     ref = ReadBarrier::Mark(ref);  // Performed by runtime entrypoint slow path.
  //   }
  //
  // Note: the original implementation in ReadBarrier::Barrier is
  // slightly more complex as it performs additional checks that we do
  // not do here for performance reasons.

  Register ref_reg = ref.AsRegister<Register>();
  Register temp_reg = temp.AsRegister<Register>();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  // /* int32_t */ monitor = obj->monitor_
  __ LoadFromOffset(kLoadWord, temp_reg, obj, monitor_offset);
  if (needs_null_check) {
    // The monitor load above is the first access to `obj` and doubles as the implicit null check.
    MaybeRecordImplicitNullCheck(instruction);
  }
  // /* LockWord */ lock_word = LockWord(monitor)
  static_assert(sizeof(LockWord) == sizeof(int32_t),
                "art::LockWord and int32_t have different sizes.");

  __ Sync(0);  // Barrier to prevent load-load reordering.

  // The actual reference load.
  if (index.IsValid()) {
    // Load types involving an "index": ArrayGet,
    // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
    // intrinsics.
    // /* HeapReference<Object> */ ref = *(obj + offset + (index << scale_factor))
    if (index.IsConstant()) {
      size_t computed_offset =
          (index.GetConstant()->AsIntConstant()->GetValue() << scale_factor) + offset;
      __ LoadFromOffset(kLoadWord, ref_reg, obj, computed_offset);
    } else {
      // Handle the special case of the
      // UnsafeGetObject/UnsafeGetObjectVolatile and UnsafeCASObject
      // intrinsics, which use a register pair as index ("long
      // offset"), of which only the low part contains data.
      Register index_reg = index.IsRegisterPair()
          ? index.AsRegisterPairLow<Register>()
          : index.AsRegister<Register>();
      __ ShiftAndAdd(TMP, index_reg, obj, scale_factor, TMP);
      __ LoadFromOffset(kLoadWord, ref_reg, TMP, offset);
    }
  } else {
    // /* HeapReference<Object> */ ref = *(obj + offset)
    __ LoadFromOffset(kLoadWord, ref_reg, obj, offset);
  }

  // Object* ref = ref_addr->AsMirrorPtr()
  __ MaybeUnpoisonHeapReference(ref_reg);

  // Slow path marking the object `ref` when it is gray.
  SlowPathCodeMIPS* slow_path;
  if (always_update_field) {
    // ReadBarrierMarkAndUpdateFieldSlowPathMIPS only supports address
    // of the form `obj + field_offset`, where `obj` is a register and
    // `field_offset` is a register pair (of which only the lower half
    // is used). Thus `offset` and `scale_factor` above are expected
    // to be null in this code path.
    DCHECK_EQ(offset, 0u);
    DCHECK_EQ(scale_factor, ScaleFactor::TIMES_1);
    slow_path = new (GetGraph()->GetArena())
        ReadBarrierMarkAndUpdateFieldSlowPathMIPS(instruction,
                                                  ref,
                                                  obj,
                                                  /* field_offset */ index,
                                                  temp_reg);
  } else {
    slow_path = new (GetGraph()->GetArena()) ReadBarrierMarkSlowPathMIPS(instruction, ref);
  }
  AddSlowPath(slow_path);

  // if (rb_state == ReadBarrier::GrayState())
  //   ref = ReadBarrier::Mark(ref);
  // Given the numeric representation, it's enough to check the low bit of the
  // rb_state. We do that by shifting the bit into the sign bit (31) and
  // performing a branch on less than zero.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  static_assert(LockWord::kReadBarrierStateSize == 1, "Expecting 1-bit read barrier state size");
  __ Sll(temp_reg, temp_reg, 31 - LockWord::kReadBarrierStateShift);
  __ Bltz(temp_reg, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
6976
// Emits an unconditional slow-path read barrier for a reference that has
// already been loaded into `out` from `obj + offset (+ index)`. Used when
// Baker (fast-path) read barriers are not in effect; the slow path calls the
// artReadBarrierSlow runtime entry point.
void CodeGeneratorMIPS::GenerateReadBarrierSlow(HInstruction* instruction,
                                                Location out,
                                                Location ref,
                                                Location obj,
                                                uint32_t offset,
                                                Location index) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the reference load.
  //
  // If heap poisoning is enabled, the unpoisoning of the loaded
  // reference will be carried out by the runtime within the slow
  // path.
  //
  // Note that `ref` currently does not get unpoisoned (when heap
  // poisoning is enabled), which is alright as the `ref` argument is
  // not used by the artReadBarrierSlow entry point.
  //
  // TODO: Unpoison `ref` when it is used by artReadBarrierSlow.
  SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena())
      ReadBarrierForHeapReferenceSlowPathMIPS(instruction, out, ref, obj, offset, index);
  AddSlowPath(slow_path);

  // Always take the slow path; it rejoins at the exit label when done.
  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
7003
7004void CodeGeneratorMIPS::MaybeGenerateReadBarrierSlow(HInstruction* instruction,
7005 Location out,
7006 Location ref,
7007 Location obj,
7008 uint32_t offset,
7009 Location index) {
7010 if (kEmitCompilerReadBarrier) {
7011 // Baker's read barriers shall be handled by the fast path
7012 // (CodeGeneratorMIPS::GenerateReferenceLoadWithBakerReadBarrier).
7013 DCHECK(!kUseBakerReadBarrier);
7014 // If heap poisoning is enabled, unpoisoning will be taken care of
7015 // by the runtime within the slow path.
7016 GenerateReadBarrierSlow(instruction, out, ref, obj, offset, index);
7017 } else if (kPoisonHeapReferences) {
7018 __ UnpoisonHeapReference(out.AsRegister<Register>());
7019 }
7020}
7021
// Emits an unconditional slow-path read barrier for a GC root that has
// already been loaded into `out`. GC roots are never poisoned, so no
// unpoisoning is involved here.
void CodeGeneratorMIPS::GenerateReadBarrierForRootSlow(HInstruction* instruction,
                                                       Location out,
                                                       Location root) {
  DCHECK(kEmitCompilerReadBarrier);

  // Insert a slow path based read barrier *after* the GC root load.
  //
  // Note that GC roots are not affected by heap poisoning, so we do
  // not need to do anything special for this here.
  SlowPathCodeMIPS* slow_path =
      new (GetGraph()->GetArena()) ReadBarrierForRootSlowPathMIPS(instruction, out, root);
  AddSlowPath(slow_path);

  // Always take the slow path; it rejoins at the exit label when done.
  __ B(slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
7038
// Allocates register locations for an HInstanceOf. The call kind depends on
// the type-check kind: the simple hierarchy checks only need a slow path when
// read barriers are compiled in, while array/unresolved/interface checks
// always go through a slow path.
void LocationsBuilderMIPS::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  bool baker_read_barrier_slow_path = false;
  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck:
    case TypeCheckKind::kAbstractClassCheck:
    case TypeCheckKind::kClassHierarchyCheck:
    case TypeCheckKind::kArrayObjectCheck:
      // Only a read barrier (if enabled) can force a runtime call here.
      call_kind =
          kEmitCompilerReadBarrier ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall;
      baker_read_barrier_slow_path = kUseBakerReadBarrier;
      break;
    case TypeCheckKind::kArrayCheck:
    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck:
      // These checks always fall back to the runtime via a slow path.
      call_kind = LocationSummary::kCallOnSlowPath;
      break;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  if (baker_read_barrier_slow_path) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // The output does overlap inputs.
  // Note that TypeCheckSlowPathMIPS uses this register too.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddRegisterTemps(NumberOfInstanceOfTemps(type_check_kind));
}
7070
// Emits code for HInstanceOf. `out` receives 1 if `obj` is an instance of
// `cls`, 0 otherwise (including when `obj` is null). Depending on the
// type-check kind this is either an inline class/hierarchy walk or a jump to
// TypeCheckSlowPathMIPS, which retries/resolves in the runtime.
void InstructionCodeGeneratorMIPS::VisitInstanceOf(HInstanceOf* instruction) {
  TypeCheckKind type_check_kind = instruction->GetTypeCheckKind();
  LocationSummary* locations = instruction->GetLocations();
  Location obj_loc = locations->InAt(0);
  Register obj = obj_loc.AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  const size_t num_temps = NumberOfInstanceOfTemps(type_check_kind);
  DCHECK_LE(num_temps, 1u);
  Location maybe_temp_loc = (num_temps >= 1) ? locations->GetTemp(0) : Location::NoLocation();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  MipsLabel done;
  SlowPathCodeMIPS* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // Avoid this check if we know `obj` is not null.
  if (instruction->MustDoNullCheck()) {
    __ Move(out, ZERO);
    __ Beqz(obj, &done);
  }

  switch (type_check_kind) {
    case TypeCheckKind::kExactCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Classes must be equal for the instanceof to succeed.
      // out = (out ^ cls) == 0, i.e. 1 on equality, 0 otherwise.
      __ Xor(out, out, cls);
      __ Sltiu(out, out, 1);
      break;
    }

    case TypeCheckKind::kAbstractClassCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // If the class is abstract, we eagerly fetch the super class of the
      // object to avoid doing a comparison we know will fail.
      MipsLabel loop;
      __ Bind(&loop);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqz(out, &done);
      __ Bne(out, cls, &loop);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kClassHierarchyCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Walk over the class hierarchy to find a match.
      MipsLabel loop, success;
      __ Bind(&loop);
      __ Beq(out, cls, &success);
      // /* HeapReference<Class> */ out = out->super_class_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       super_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      __ Bnez(out, &loop);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ B(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayObjectCheck: {
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kCompilerReadBarrierOption);
      // Do an exact check.
      MipsLabel success;
      __ Beq(out, cls, &success);
      // Otherwise, we need to check that the object's class is a non-primitive array.
      // /* HeapReference<Class> */ out = out->component_type_
      GenerateReferenceLoadOneRegister(instruction,
                                       out_loc,
                                       component_offset,
                                       maybe_temp_loc,
                                       kCompilerReadBarrierOption);
      // If `out` is null, we use it for the result, and jump to `done`.
      __ Beqz(out, &done);
      __ LoadFromOffset(kLoadUnsignedHalfword, out, out, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      // out = (primitive_type == kPrimNot), i.e. 1 for a reference component type.
      __ Sltiu(out, out, 1);
      __ B(&done);
      __ Bind(&success);
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kArrayCheck: {
      // No read barrier since the slow path will retry upon failure.
      // /* HeapReference<Class> */ out = obj->klass_
      GenerateReferenceLoadTwoRegisters(instruction,
                                        out_loc,
                                        obj_loc,
                                        class_offset,
                                        maybe_temp_loc,
                                        kWithoutReadBarrier);
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
                                                                    /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ Bne(out, cls, slow_path->GetEntryLabel());
      __ LoadConst32(out, 1);
      break;
    }

    case TypeCheckKind::kUnresolvedCheck:
    case TypeCheckKind::kInterfaceCheck: {
      // Note that we indeed only call on slow path, but we always go
      // into the slow path for the unresolved and interface check
      // cases.
      //
      // We cannot directly call the InstanceofNonTrivial runtime
      // entry point without resorting to a type checking slow path
      // here (i.e. by calling InvokeRuntime directly), as it would
      // require to assign fixed registers for the inputs of this
      // HInstanceOf instruction (following the runtime calling
      // convention), which might be cluttered by the potential first
      // read barrier emission at the beginning of this method.
      //
      // TODO: Introduce a new runtime entry point taking the object
      // to test (instead of its class) as argument, and let it deal
      // with the read barrier issues. This will let us refactor this
      // case of the `switch` code as it was previously (with a direct
      // call to the runtime not using a type checking slow path).
      // This should also be beneficial for the other cases above.
      DCHECK(locations->OnlyCallsOnSlowPath());
      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathMIPS(instruction,
                                                                    /* is_fatal */ false);
      codegen_->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      break;
    }
  }

  __ Bind(&done);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
7244
7245void LocationsBuilderMIPS::VisitIntConstant(HIntConstant* constant) {
7246 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7247 locations->SetOut(Location::ConstantLocation(constant));
7248}
7249
// No code is emitted here: the constant location recorded by the builder
// means the value is generated at each use site instead.
void InstructionCodeGeneratorMIPS::VisitIntConstant(HIntConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
7253
7254void LocationsBuilderMIPS::VisitNullConstant(HNullConstant* constant) {
7255 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7256 locations->SetOut(Location::ConstantLocation(constant));
7257}
7258
// No code is emitted here: the constant location recorded by the builder
// means the value is generated at each use site instead.
void InstructionCodeGeneratorMIPS::VisitNullConstant(HNullConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
7262
7263void LocationsBuilderMIPS::HandleInvoke(HInvoke* invoke) {
7264 InvokeDexCallingConventionVisitorMIPS calling_convention_visitor;
7265 CodeGenerator::CreateCommonInvokeLocationSummary(invoke, &calling_convention_visitor);
7266}
7267
7268void LocationsBuilderMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
7269 HandleInvoke(invoke);
Alexey Frunze1b8464d2016-11-12 17:22:05 -08007270 // The register T7 is required to be used for the hidden argument in
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007271 // art_quick_imt_conflict_trampoline, so add the hidden argument.
Alexey Frunze1b8464d2016-11-12 17:22:05 -08007272 invoke->GetLocations()->AddTemp(Location::RegisterLocation(T7));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007273}
7274
// Emits an interface method dispatch: loads the receiver's class, indexes the
// IMT, and performs an indirect call via T9 with the dex method index in a
// hidden-argument register (set up in the locations builder).
void InstructionCodeGeneratorMIPS::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  Location receiver = invoke->GetLocations()->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);

  // Set the hidden argument.
  __ LoadConst32(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                 invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    // Receiver was spilled: reload it first, then load its class.
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // The class load above is the first access to the receiver; it doubles as the null check.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  __ LoadFromOffset(kLoadWord, temp, temp,
      mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
  uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
      invoke->GetImtIndex(), kMipsPointerSize));
  // temp = temp->GetImtEntryAt(method_offset);
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ NopIfNoReordering();
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
7316
7317void LocationsBuilderMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
Chris Larsen701566a2015-10-27 15:29:13 -07007318 IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
7319 if (intrinsic.TryDispatch(invoke)) {
7320 return;
7321 }
7322
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007323 HandleInvoke(invoke);
7324}
7325
// Location setup for static/direct invokes. On pre-R6 with a PC-relative
// method load kind, an extra input slot is reserved for the base register
// used to form PC-relative addresses; intrinsics that may still make a call
// receive that extra input as Location::Any().
void LocationsBuilderMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  // Explicit clinit checks triggered by static invokes must have been pruned by
  // art::PrepareForRegisterAllocation.
  DCHECK(!invoke->IsStaticWithExplicitClinitCheck());

  bool is_r6 = codegen_->GetInstructionSetFeatures().IsR6();
  // R6 can form PC-relative addresses directly, so no extra base register is needed there.
  bool has_extra_input = invoke->HasPcRelativeMethodLoadKind() && !is_r6;

  IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
  if (intrinsic.TryDispatch(invoke)) {
    if (invoke->GetLocations()->CanCall() && has_extra_input) {
      invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::Any());
    }
    return;
  }

  HandleInvoke(invoke);

  // Add the extra input register if either the dex cache array base register
  // or the PC-relative base register for accessing literals is needed.
  if (has_extra_input) {
    invoke->GetLocations()->SetInAt(invoke->GetSpecialInputIndex(), Location::RequiresRegister());
  }
}
7350
// Polymorphic invokes (MethodHandle.invoke/invokeExact) use the common
// invoke location setup; the actual dispatch is a runtime call.
void LocationsBuilderMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  HandleInvoke(invoke);
}
7354
// Emits the shared runtime-call sequence for a polymorphic invoke.
void InstructionCodeGeneratorMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
  codegen_->GenerateInvokePolymorphicCall(invoke);
}
7358
Chris Larsen701566a2015-10-27 15:29:13 -07007359static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007360 if (invoke->GetLocations()->Intrinsified()) {
Chris Larsen701566a2015-10-27 15:29:13 -07007361 IntrinsicCodeGeneratorMIPS intrinsic(codegen);
7362 intrinsic.Dispatch(invoke);
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007363 return true;
7364 }
7365 return false;
7366}
7367
// Returns the string load kind actually supported for the requested one.
// PC-relative kinds are downgraded to a runtime call on pre-R6 when the graph
// has irreducible loops, since the PC-relative base optimization is
// incompatible with them there.
HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind(
    HLoadString::LoadKind desired_string_load_kind) {
  // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
  // is incompatible with it.
  // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
  // with irreducible loops.
  bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
  bool is_r6 = GetInstructionSetFeatures().IsR6();
  bool fallback_load = has_irreducible_loops && !is_r6;
  switch (desired_string_load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageInternTable:
    case HLoadString::LoadKind::kBssEntry:
      // AOT-only kinds; subject to the irreducible-loop fallback above.
      DCHECK(!Runtime::Current()->UseJitCompilation());
      break;
    case HLoadString::LoadKind::kBootImageAddress:
      break;
    case HLoadString::LoadKind::kJitTableAddress:
      DCHECK(Runtime::Current()->UseJitCompilation());
      fallback_load = false;  // Not PC-relative, always usable.
      break;
    case HLoadString::LoadKind::kRuntimeCall:
      fallback_load = false;  // Already the fallback itself.
      break;
  }
  if (fallback_load) {
    desired_string_load_kind = HLoadString::LoadKind::kRuntimeCall;
  }
  return desired_string_load_kind;
}
7398
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007399HLoadClass::LoadKind CodeGeneratorMIPS::GetSupportedLoadClassKind(
7400 HLoadClass::LoadKind desired_class_load_kind) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007401 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunze06a46c42016-07-19 15:00:40 -07007402 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007403 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
7404 // with irreducible loops.
Alexey Frunze06a46c42016-07-19 15:00:40 -07007405 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007406 bool is_r6 = GetInstructionSetFeatures().IsR6();
7407 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007408 switch (desired_class_load_kind) {
Nicolas Geoffray83c8e272017-01-31 14:36:37 +00007409 case HLoadClass::LoadKind::kInvalid:
7410 LOG(FATAL) << "UNREACHABLE";
7411 UNREACHABLE();
Alexey Frunze06a46c42016-07-19 15:00:40 -07007412 case HLoadClass::LoadKind::kReferrersClass:
7413 fallback_load = false;
7414 break;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007415 case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko6bec91c2017-01-09 15:03:12 +00007416 case HLoadClass::LoadKind::kBssEntry:
7417 DCHECK(!Runtime::Current()->UseJitCompilation());
7418 break;
Vladimir Marko764d4542017-05-16 10:31:41 +01007419 case HLoadClass::LoadKind::kBootImageAddress:
7420 break;
Nicolas Geoffray22384ae2016-12-12 22:33:36 +00007421 case HLoadClass::LoadKind::kJitTableAddress:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007422 DCHECK(Runtime::Current()->UseJitCompilation());
Alexey Frunze627c1a02017-01-30 19:28:14 -08007423 fallback_load = false;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007424 break;
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007425 case HLoadClass::LoadKind::kRuntimeCall:
Alexey Frunze06a46c42016-07-19 15:00:40 -07007426 fallback_load = false;
7427 break;
7428 }
7429 if (fallback_load) {
Vladimir Marko847e6ce2017-06-02 13:55:07 +01007430 desired_class_load_kind = HLoadClass::LoadKind::kRuntimeCall;
Alexey Frunze06a46c42016-07-19 15:00:40 -07007431 }
7432 return desired_class_load_kind;
Vladimir Markodbb7f5b2016-03-30 13:23:58 +01007433}
7434
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007435Register CodeGeneratorMIPS::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
7436 Register temp) {
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007437 CHECK(!GetInstructionSetFeatures().IsR6());
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007438 CHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
7439 Location location = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
7440 if (!invoke->GetLocations()->Intrinsified()) {
7441 return location.AsRegister<Register>();
7442 }
7443 // For intrinsics we allow any location, so it may be on the stack.
7444 if (!location.IsRegister()) {
7445 __ LoadFromOffset(kLoadWord, temp, SP, location.GetStackIndex());
7446 return temp;
7447 }
7448 // For register locations, check if the register was saved. If so, get it from the stack.
7449 // Note: There is a chance that the register was saved but not overwritten, so we could
7450 // save one load. However, since this is just an intrinsic slow path we prefer this
7451 // simple and more robust approach rather that trying to determine if that's the case.
7452 SlowPathCode* slow_path = GetCurrentSlowPath();
7453 DCHECK(slow_path != nullptr); // For intrinsified invokes the call is emitted on the slow path.
7454 if (slow_path->IsCoreRegisterSaved(location.AsRegister<Register>())) {
7455 int stack_offset = slow_path->GetStackOffsetOfCoreRegister(location.AsRegister<Register>());
7456 __ LoadFromOffset(kLoadWord, temp, SP, stack_offset);
7457 return temp;
7458 }
7459 return location.AsRegister<Register>();
7460}
7461
Vladimir Markodc151b22015-10-15 18:02:30 +01007462HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS::GetSupportedInvokeStaticOrDirectDispatch(
7463 const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
Nicolas Geoffray5e4e11e2016-09-22 13:17:41 +01007464 HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007465 HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007466 // We disable PC-relative load on pre-R6 when there is an irreducible loop, as the optimization
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007467 // is incompatible with it.
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007468 // TODO: Create as many HMipsComputeBaseMethodAddress instructions as needed for methods
7469 // with irreducible loops.
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007470 bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
Alexey Frunze6b892cd2017-01-03 17:11:38 -08007471 bool is_r6 = GetInstructionSetFeatures().IsR6();
7472 bool fallback_load = has_irreducible_loops && !is_r6;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007473 switch (dispatch_info.method_load_kind) {
Vladimir Marko65979462017-05-19 17:25:12 +01007474 case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative:
Vladimir Marko0eb882b2017-05-15 13:39:18 +01007475 case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007476 break;
Vladimir Markodc151b22015-10-15 18:02:30 +01007477 default:
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007478 fallback_load = false;
Vladimir Markodc151b22015-10-15 18:02:30 +01007479 break;
7480 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007481 if (fallback_load) {
Vladimir Markoe7197bf2017-06-02 17:00:23 +01007482 dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall;
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007483 dispatch_info.method_load_data = 0;
7484 }
Alexey Frunzee3fb2452016-05-10 16:08:05 -07007485 return dispatch_info;
Vladimir Markodc151b22015-10-15 18:02:30 +01007486}
7487
// Emits a static or direct call: materializes the callee ArtMethod* (or entrypoint)
// according to the invoke's MethodLoadKind, then emits the call itself according to
// its CodePtrLocation, and finally records the PC info for the call.
void CodeGeneratorMIPS::GenerateStaticOrDirectCall(
    HInvokeStaticOrDirect* invoke, Location temp, SlowPathCode* slow_path) {
  // All registers are assumed to be correctly set up per the calling convention.
  Location callee_method = temp;  // For all kinds except kRecursive, callee will be in temp.
  HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
  HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
  bool is_r6 = GetInstructionSetFeatures().IsR6();
  // On pre-R6, PC-relative load kinds need a base register, which arrives as the
  // invoke's extra input; R6 has PC-relative addressing and uses ZERO as a dummy base.
  Register base_reg = (invoke->HasPcRelativeMethodLoadKind() && !is_r6)
      ? GetInvokeStaticOrDirectExtraParameter(invoke, temp.AsRegister<Register>())
      : ZERO;

  switch (method_load_kind) {
    case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
      // temp = thread->string_init_entrypoint
      uint32_t offset =
          GetThreadOffset<kMipsPointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();
      __ LoadFromOffset(kLoadWord,
                        temp.AsRegister<Register>(),
                        TR,
                        offset);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRecursive:
      // Recursive call: the callee is the current method, already in its input register.
      callee_method = invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(GetCompilerOptions().IsBootImage());
      // Two patches (high/low halves); the 0x5678 immediate is a placeholder patched
      // later via info_low->label.
      PcRelativePatchInfo* info_high = NewPcRelativeMethodPatch(invoke->GetTargetMethod());
      PcRelativePatchInfo* info_low =
          NewPcRelativeMethodPatch(invoke->GetTargetMethod(), info_high);
      Register temp_reg = temp.AsRegister<Register>();
      EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base_reg);
      __ Addiu(temp_reg, TMP, /* placeholder */ 0x5678, &info_low->label);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
      // The method address is known at compile time; load it as a constant.
      __ LoadConst32(temp.AsRegister<Register>(), invoke->GetMethodAddress());
      break;
    case HInvokeStaticOrDirect::MethodLoadKind::kBssEntry: {
      // Load the ArtMethod* from the .bss entry; the address is assembled from a
      // patched high half plus a patched low offset in the Lw.
      PcRelativePatchInfo* info_high = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()));
      PcRelativePatchInfo* info_low = NewMethodBssEntryPatch(
          MethodReference(&GetGraph()->GetDexFile(), invoke->GetDexMethodIndex()), info_high);
      Register temp_reg = temp.AsRegister<Register>();
      EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base_reg);
      __ Lw(temp_reg, TMP, /* placeholder */ 0x5678, &info_low->label);
      break;
    }
    case HInvokeStaticOrDirect::MethodLoadKind::kRuntimeCall: {
      GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
      return;  // No code pointer retrieval; the runtime performs the call directly.
    }
  }

  switch (code_ptr_location) {
    case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
      // Self-call: branch-and-link directly to this method's frame entry.
      __ Bal(&frame_entry_label_);
      break;
    case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
      // T9 = callee_method->entry_point_from_quick_compiled_code_;
      __ LoadFromOffset(kLoadWord,
                        T9,
                        callee_method.AsRegister<Register>(),
                        ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                            kMipsPointerSize).Int32Value());
      // T9()
      __ Jalr(T9);
      __ NopIfNoReordering();
      break;
  }
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);

  DCHECK(!IsLeafMethod());
}
7562
7563void InstructionCodeGeneratorMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
David Brazdil58282f42016-01-14 12:45:10 +00007564 // Explicit clinit checks triggered by static invokes must have been pruned by
7565 // art::PrepareForRegisterAllocation.
7566 DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007567
7568 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7569 return;
7570 }
7571
7572 LocationSummary* locations = invoke->GetLocations();
7573 codegen_->GenerateStaticOrDirectCall(invoke,
7574 locations->HasTemps()
7575 ? locations->GetTemp(0)
7576 : Location::NoLocation());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007577}
7578
// Emits the virtual dispatch sequence: load the receiver's class, fetch the
// ArtMethod* from the class's embedded vtable, load its quick-code entrypoint
// into T9 and call through it, then record PC info for the call site.
void CodeGeneratorMIPS::GenerateVirtualCall(
    HInvokeVirtual* invoke, Location temp_location, SlowPathCode* slow_path) {
  // Use the calling convention instead of the location of the receiver, as
  // intrinsics may have put the receiver in a different register. In the intrinsics
  // slow path, the arguments have been moved to the right place, so here we are
  // guaranteed that the receiver is the first register of the calling convention.
  InvokeDexCallingConvention calling_convention;
  Register receiver = calling_convention.GetRegisterAt(0);

  Register temp = temp_location.AsRegister<Register>();
  size_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
      invoke->GetVTableIndex(), kMipsPointerSize).SizeValue();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);

  // temp = object->GetClass();
  __ LoadFromOffset(kLoadWord, temp, receiver, class_offset);
  // Recorded immediately after the class load above — that is the instruction
  // which faults if the receiver is null.
  MaybeRecordImplicitNullCheck(invoke);
  // Instead of simply (possibly) unpoisoning `temp` here, we should
  // emit a read barrier for the previous class reference load.
  // However this is not required in practice, as this is an
  // intermediate/temporary reference and because the current
  // concurrent copying collector keeps the from-space memory
  // intact/accessible until the end of the marking phase (the
  // concurrent copying collector may not in the future).
  __ MaybeUnpoisonHeapReference(temp);
  // temp = temp->GetMethodAt(method_offset);
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // T9 = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, T9, temp, entry_point.Int32Value());
  // T9();
  __ Jalr(T9);
  __ NopIfNoReordering();
  RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
}
7614
7615void InstructionCodeGeneratorMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
7616 if (TryGenerateIntrinsicCode(invoke, codegen_)) {
7617 return;
7618 }
7619
7620 codegen_->GenerateVirtualCall(invoke, invoke->GetLocations()->GetTemp(0));
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007621 DCHECK(!codegen_->IsLeafMethod());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007622}
7623
// Sets up register constraints for HLoadClass, depending on its load kind,
// the ISA revision (R2 needs an extra base-register input for PC-relative
// accesses) and the read-barrier configuration.
void LocationsBuilderMIPS::VisitLoadClass(HLoadClass* cls) {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Runtime-call resolution follows the runtime calling convention: the type
    // index goes in (and the class comes back in) argument register 0.
    InvokeRuntimeCallingConvention calling_convention;
    Location loc = Location::RegisterLocation(calling_convention.GetRegisterAt(0));
    CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(cls, loc, loc);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());
  const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  // Boot-image classes never need a read barrier.
  const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
  // A slow path is needed either for resolution/initialization (environment) or
  // for the read-barrier mark step.
  LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
      ? LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kBssEntry:
      if (isR6) {
        // R6 has PC-relative addressing; no base-register input is needed.
        break;
      }
      FALLTHROUGH_INTENDED;
    case HLoadClass::LoadKind::kReferrersClass:
      // Input 0: the PC-relative base on R2, or the current method for kReferrersClass.
      locations->SetInAt(0, Location::RequiresRegister());
      break;
    default:
      break;
  }
  locations->SetOut(Location::RequiresRegister());
  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
    if (!kUseReadBarrier || kUseBakerReadBarrier) {
      // Rely on the type resolution or initialization and marking to save everything we need.
      // Request a temp to hold the BSS entry location for the slow path.
      locations->AddTemp(Location::RequiresRegister());
      RegisterSet caller_saves = RegisterSet::Empty();
      InvokeRuntimeCallingConvention calling_convention;
      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
      locations->SetCustomSlowPathCallerSaves(caller_saves);
    } else {
      // For non-Baker read barriers we have a temp-clobbering call.
    }
  }
}
7672
// Emits code to load a java.lang.Class reference according to the load kind
// chosen by GetSupportedLoadClassKind, emitting a slow path for BSS-entry
// resolution and/or class initialization checks when required.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
  HLoadClass::LoadKind load_kind = cls->GetLoadKind();
  if (load_kind == HLoadClass::LoadKind::kRuntimeCall) {
    // Fully delegated to the runtime; nothing else to emit here.
    codegen_->GenerateLoadClassRuntimeCall(cls);
    return;
  }
  DCHECK(!cls->NeedsAccessCheck());

  LocationSummary* locations = cls->GetLocations();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  // Select the auxiliary register: the PC-relative base (R2 only; ZERO on R6) or
  // the current method, depending on the load kind.
  Register base_or_current_method_reg;
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadClass::LoadKind::kBootImageAddress:
    case HLoadClass::LoadKind::kBssEntry:
      base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      break;
    case HLoadClass::LoadKind::kReferrersClass:
    case HLoadClass::LoadKind::kRuntimeCall:
      base_or_current_method_reg = locations->InAt(0).AsRegister<Register>();
      break;
    default:
      base_or_current_method_reg = ZERO;
      break;
  }

  // Boot-image classes are read without a read barrier.
  const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
      ? kWithoutReadBarrier
      : kCompilerReadBarrierOption;
  bool generate_null_check = false;
  CodeGeneratorMIPS::PcRelativePatchInfo* bss_info_high = nullptr;
  switch (load_kind) {
    case HLoadClass::LoadKind::kReferrersClass: {
      DCHECK(!cls->CanCallRuntime());
      DCHECK(!cls->MustGenerateClinitCheck());
      // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              base_or_current_method_reg,
                              ArtMethod::DeclaringClassOffset().Int32Value(),
                              read_barrier_option);
      break;
    }
    case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // Paired high/low patches; the 0x5678 immediate is a placeholder patched
      // via info_low->label at link time.
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
                                                     out,
                                                     base_or_current_method_reg);
      __ Addiu(out, out, /* placeholder */ 0x5678, &info_low->label);
      break;
    }
    case HLoadClass::LoadKind::kBootImageAddress: {
      DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
      // The class address is known now and is non-moveable; embed it as a literal.
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     base_or_current_method_reg,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      break;
    }
    case HLoadClass::LoadKind::kBssEntry: {
      // Load the GC root out of the type's .bss entry. bss_info_high is kept for the
      // slow path (created below when generate_null_check is set).
      bss_info_high = codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex(), bss_info_high);
      // With non-Baker read barriers `out` doubles as the address temp; otherwise the
      // temp requested by the locations builder holds the BSS entry address for the
      // slow path.
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
      codegen_->EmitPcRelativeAddressPlaceholderHigh(bss_info_high,
                                                     temp,
                                                     base_or_current_method_reg);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              temp,
                              /* placeholder */ 0x5678,
                              read_barrier_option,
                              &info_low->label);
      // The entry may still be null if the type is unresolved; check on the main path.
      generate_null_check = true;
      break;
    }
    case HLoadClass::LoadKind::kJitTableAddress: {
      // The JIT patches the LUI/root-load pair; reordering is disabled around the
      // bound label so the assembler does not disturb the patched LUI.
      CodeGeneratorMIPS::JitPatchInfo* info = codegen_->NewJitRootClassPatch(cls->GetDexFile(),
                                                                             cls->GetTypeIndex(),
                                                                             cls->GetClass());
      bool reordering = __ SetReorder(false);
      __ Bind(&info->high_label);
      __ Lui(out, /* placeholder */ 0x1234);
      __ SetReorder(reordering);
      GenerateGcRootFieldLoad(cls,
                              out_loc,
                              out,
                              /* placeholder */ 0x5678,
                              read_barrier_option,
                              &info->low_label);
      break;
    }
    case HLoadClass::LoadKind::kRuntimeCall:
    case HLoadClass::LoadKind::kInvalid:
      // kRuntimeCall was handled at the top of the function; kInvalid never reaches codegen.
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }

  // Shared slow path for BSS-entry resolution (null check) and/or clinit checks.
  if (generate_null_check || cls->MustGenerateClinitCheck()) {
    DCHECK(cls->CanCallRuntime());
    SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathMIPS(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck(), bss_info_high);
    codegen_->AddSlowPath(slow_path);
    if (generate_null_check) {
      __ Beqz(out, slow_path->GetEntryLabel());
    }
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
7799
// Returns the offset of the pending-exception field within the Thread object,
// for TLS-relative (TR register) loads and stores.
static int32_t GetExceptionTlsOffset() {
  return Thread::ExceptionOffset<kMipsPointerSize>().Int32Value();
}
7803
7804void LocationsBuilderMIPS::VisitLoadException(HLoadException* load) {
7805 LocationSummary* locations =
7806 new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
7807 locations->SetOut(Location::RequiresRegister());
7808}
7809
7810void InstructionCodeGeneratorMIPS::VisitLoadException(HLoadException* load) {
7811 Register out = load->GetLocations()->Out().AsRegister<Register>();
7812 __ LoadFromOffset(kLoadWord, out, TR, GetExceptionTlsOffset());
7813}
7814
// No inputs, outputs or temps: clearing the exception only stores ZERO via TR.
void LocationsBuilderMIPS::VisitClearException(HClearException* clear) {
  new (GetGraph()->GetArena()) LocationSummary(clear, LocationSummary::kNoCall);
}
7818
void InstructionCodeGeneratorMIPS::VisitClearException(HClearException* clear ATTRIBUTE_UNUSED) {
  // Store null (the ZERO register) into the thread's pending-exception TLS slot.
  __ StoreToOffset(kStoreWord, ZERO, TR, GetExceptionTlsOffset());
}
7822
// Sets up register constraints for HLoadString, mirroring VisitLoadClass: R2 needs
// an extra base-register input for PC-relative accesses, and the BSS-entry kind
// needs a temp plus custom slow-path caller saves under Baker read barriers.
void LocationsBuilderMIPS::VisitLoadString(HLoadString* load) {
  LocationSummary::CallKind call_kind = CodeGenerator::GetLoadStringCallKind(load);
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  const bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageInternTable:
    case HLoadString::LoadKind::kBssEntry:
      if (isR6) {
        // R6 has PC-relative addressing; no base-register input is needed.
        break;
      }
      FALLTHROUGH_INTENDED;
    // We need an extra register for PC-relative dex cache accesses.
    case HLoadString::LoadKind::kRuntimeCall:
      locations->SetInAt(0, Location::RequiresRegister());
      break;
    default:
      break;
  }
  if (load_kind == HLoadString::LoadKind::kRuntimeCall) {
    // The resolved string is returned in the runtime convention's argument register 0.
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetOut(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  } else {
    locations->SetOut(Location::RequiresRegister());
    if (load_kind == HLoadString::LoadKind::kBssEntry) {
      if (!kUseReadBarrier || kUseBakerReadBarrier) {
        // Rely on the pResolveString and marking to save everything we need.
        // Request a temp to hold the BSS entry location for the slow path.
        locations->AddTemp(Location::RequiresRegister());
        RegisterSet caller_saves = RegisterSet::Empty();
        InvokeRuntimeCallingConvention calling_convention;
        caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
        locations->SetCustomSlowPathCallerSaves(caller_saves);
      } else {
        // For non-Baker read barriers we have a temp-clobbering call.
      }
    }
  }
}
7865
// Emits code materializing a java.lang.String reference into the output register,
// choosing the access pattern dictated by the HLoadString's load kind.
// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
// move.
void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_SAFETY_ANALYSIS {
  HLoadString::LoadKind load_kind = load->GetLoadKind();
  LocationSummary* locations = load->GetLocations();
  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();
  Register base_or_current_method_reg;
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
  switch (load_kind) {
    // We need an extra register for PC-relative literals on R2.
    case HLoadString::LoadKind::kBootImageAddress:
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
    case HLoadString::LoadKind::kBootImageInternTable:
    case HLoadString::LoadKind::kBssEntry:
      // On R6 PC-relative addressing needs no base; on R2 the base was requested
      // as input 0 by the locations builder.
      base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
      break;
    default:
      base_or_current_method_reg = ZERO;
      break;
  }

  switch (load_kind) {
    case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
      DCHECK(codegen_->GetCompilerOptions().IsBootImage());
      // Two linked patches: the high half of the address is emitted by
      // EmitPcRelativeAddressPlaceholderHigh, the low half by the Addiu below.
      // 0x5678 is a placeholder immediate rewritten at link time.
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
                                                     out,
                                                     base_or_current_method_reg);
      __ Addiu(out, out, /* placeholder */ 0x5678, &info_low->label);
      return;
    }
    case HLoadString::LoadKind::kBootImageAddress: {
      // The string address is known now; load it as a deduplicated 32-bit literal.
      uint32_t address = dchecked_integral_cast<uint32_t>(
          reinterpret_cast<uintptr_t>(load->GetString().Get()));
      DCHECK_NE(address, 0u);
      __ LoadLiteral(out,
                     base_or_current_method_reg,
                     codegen_->DeduplicateBootImageAddressLiteral(address));
      return;
    }
    case HLoadString::LoadKind::kBootImageInternTable: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      // Load the reference through the boot image intern table entry (Lw, not Addiu).
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
                                                     out,
                                                     base_or_current_method_reg);
      __ Lw(out, out, /* placeholder */ 0x5678, &info_low->label);
      return;
    }
    case HLoadString::LoadKind::kBssEntry: {
      DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
          codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
      CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
          codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
      // With a non-Baker read barrier `out` doubles as the address temp; otherwise
      // a dedicated temp was reserved by the locations builder.
      constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
      Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
      codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
                                                     temp,
                                                     base_or_current_method_reg);
      GenerateGcRootFieldLoad(load,
                              out_loc,
                              temp,
                              /* placeholder */ 0x5678,
                              kCompilerReadBarrierOption,
                              &info_low->label);
      // A null .bss entry means the string is not resolved yet; resolve it on the slow path.
      SlowPathCodeMIPS* slow_path =
          new (GetGraph()->GetArena()) LoadStringSlowPathMIPS(load, info_high);
      codegen_->AddSlowPath(slow_path);
      __ Beqz(out, slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
    case HLoadString::LoadKind::kJitTableAddress: {
      CodeGeneratorMIPS::JitPatchInfo* info =
          codegen_->NewJitRootStringPatch(load->GetDexFile(),
                                          load->GetStringIndex(),
                                          load->GetString());
      // Reordering must be off so the Lui stays exactly at high_label for patching.
      bool reordering = __ SetReorder(false);
      __ Bind(&info->high_label);
      __ Lui(out, /* placeholder */ 0x1234);
      __ SetReorder(reordering);
      GenerateGcRootFieldLoad(load,
                              out_loc,
                              out,
                              /* placeholder */ 0x5678,
                              kCompilerReadBarrierOption,
                              &info->low_label);
      return;
    }
    default:
      break;
  }

  // TODO: Re-add the compiler code to do string dex cache lookup again.
  // Fallback: resolve the string through the runtime.
  DCHECK(load_kind == HLoadString::LoadKind::kRuntimeCall);
  InvokeRuntimeCallingConvention calling_convention;
  DCHECK_EQ(calling_convention.GetRegisterAt(0), out);
  __ LoadConst32(calling_convention.GetRegisterAt(0), load->GetStringIndex().index_);
  codegen_->InvokeRuntime(kQuickResolveString, load, load->GetDexPc());
  CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
}
7975
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007976void LocationsBuilderMIPS::VisitLongConstant(HLongConstant* constant) {
7977 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
7978 locations->SetOut(Location::ConstantLocation(constant));
7979}
7980
// No code is emitted here; the constant is materialized where it is used.
void InstructionCodeGeneratorMIPS::VisitLongConstant(HLongConstant* constant ATTRIBUTE_UNUSED) {
  // Will be generated at use site.
}
7984
// Monitor enter/exit is always a runtime call; the object goes in the first
// runtime-call argument register.
void LocationsBuilderMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
7991
7992void InstructionCodeGeneratorMIPS::VisitMonitorOperation(HMonitorOperation* instruction) {
7993 if (instruction->IsEnter()) {
Serban Constantinescufca16662016-07-14 09:21:59 +01007994 codegen_->InvokeRuntime(kQuickLockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007995 CheckEntrypointTypes<kQuickLockObject, void, mirror::Object*>();
7996 } else {
Serban Constantinescufca16662016-07-14 09:21:59 +01007997 codegen_->InvokeRuntime(kQuickUnlockObject, instruction, instruction->GetDexPc());
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02007998 }
7999 CheckEntrypointTypes<kQuickUnlockObject, void, mirror::Object*>();
8000}
8001
// Multiplication never calls the runtime: integral types use core registers,
// floating-point types use FPU registers; output never overlaps an input.
void LocationsBuilderMIPS::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
8024
// Emits the multiply. 32-bit and FP cases are single instructions; the 64-bit
// case is composed from 32x32 multiplies per the schoolbook identity below.
void InstructionCodeGeneratorMIPS::VisitMul(HMul* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register lhs = locations->InAt(0).AsRegister<Register>();
      Register rhs = locations->InAt(1).AsRegister<Register>();

      if (isR6) {
        __ MulR6(dst, lhs, rhs);
      } else {
        __ MulR2(dst, lhs, rhs);
      }
      break;
    }
    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register lhs_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register lhs_low = locations->InAt(0).AsRegisterPairLow<Register>();
      Register rhs_high = locations->InAt(1).AsRegisterPairHigh<Register>();
      Register rhs_low = locations->InAt(1).AsRegisterPairLow<Register>();

      // Extra checks to protect against the existence of the overlapping pair A1_A2.
      // The algorithm is wrong if dst_high is either lhs_lo or rhs_lo:
      // (e.g. lhs=a0_a1, rhs=a2_a3 and dst=a1_a2), because dst_high is written
      // before lhs_low/rhs_low are last read.
      DCHECK_NE(dst_high, lhs_low);
      DCHECK_NE(dst_high, rhs_low);

      // A_B * C_D
      // dst_hi: [ low(A*D) + low(B*C) + hi(B*D) ]
      // dst_lo: [ low(B*D) ]
      // Note: R2 and R6 MUL produce the low 32 bit of the multiplication result.

      if (isR6) {
        __ MulR6(TMP, lhs_high, rhs_low);
        __ MulR6(dst_high, lhs_low, rhs_high);
        __ Addu(dst_high, dst_high, TMP);
        // MUHU yields the high 32 bits of the unsigned low*low product (the carry
        // into the high word).
        __ MuhuR6(TMP, lhs_low, rhs_low);
        __ Addu(dst_high, dst_high, TMP);
        __ MulR6(dst_low, lhs_low, rhs_low);
      } else {
        __ MulR2(TMP, lhs_high, rhs_low);
        __ MulR2(dst_high, lhs_low, rhs_high);
        __ Addu(dst_high, dst_high, TMP);
        // On R2 the unsigned low*low product lives in HI/LO; HI is the carry into
        // the high word, LO is the low result word.
        __ MultuR2(lhs_low, rhs_low);
        __ Mfhi(TMP);
        __ Addu(dst_high, dst_high, TMP);
        __ Mflo(dst_low);
      }
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister lhs = locations->InAt(0).AsFpuRegister<FRegister>();
      FRegister rhs = locations->InAt(1).AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ MulS(dst, lhs, rhs);
      } else {
        __ MulD(dst, lhs, rhs);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected mul type " << type;
  }
}
8096
// Negation never calls the runtime: core registers for integral types, FPU
// registers for floating-point; output never overlaps the input.
void LocationsBuilderMIPS::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
8117
// Emits arithmetic negation. 32-bit and FP are single instructions; 64-bit
// negates both halves and propagates the borrow from the low word.
void InstructionCodeGeneratorMIPS::VisitNeg(HNeg* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register src = locations->InAt(0).AsRegister<Register>();
      __ Subu(dst, ZERO, src);
      break;
    }
    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
      __ Subu(dst_low, ZERO, src_low);
      // TMP = (dst_low != 0): the borrow out of the low-word negation.
      __ Sltu(TMP, ZERO, dst_low);
      __ Subu(dst_high, ZERO, src_high);
      __ Subu(dst_high, dst_high, TMP);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
      if (type == Primitive::kPrimFloat) {
        __ NegS(dst, src);
      } else {
        __ NegD(dst, src);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected neg type " << type;
  }
}
8155
// Array allocation is a runtime call: class and length go in the first two
// runtime-call argument registers, the reference comes back in the return location.
void LocationsBuilderMIPS::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
8164
// Emits the runtime call allocating the array, with the entrypoint chosen from
// the array's resolved class.
void InstructionCodeGeneratorMIPS::VisitNewArray(HNewArray* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  QuickEntrypointEnum entrypoint =
      CodeGenerator::GetArrayAllocationEntrypoint(instruction->GetLoadClass()->GetClass());
  codegen_->InvokeRuntime(entrypoint, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickAllocArrayResolved, void*, mirror::Class*, int32_t>();
  DCHECK(!codegen_->IsLeafMethod());
}
8174
// Object allocation is a runtime call. String allocation goes through
// StringFactory and needs a temp for the current method instead of an input.
void LocationsBuilderMIPS::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  if (instruction->IsStringAlloc()) {
    locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
  } else {
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  }
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}
8186
// Emits the allocation call. Strings are allocated by calling the NewEmptyString
// entrypoint indirectly through its entry-point-from-quick-compiled-code slot;
// everything else uses the instruction's recorded allocation entrypoint.
void InstructionCodeGeneratorMIPS::VisitNewInstance(HNewInstance* instruction) {
  // Note: if heap poisoning is enabled, the entry point takes care
  // of poisoning the reference.
  if (instruction->IsStringAlloc()) {
    // String is allocated through StringFactory. Call NewEmptyString entry point.
    Register temp = instruction->GetLocations()->GetTemp(0).AsRegister<Register>();
    MemberOffset code_offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsPointerSize);
    __ LoadFromOffset(kLoadWord, temp, TR, QUICK_ENTRY_POINT(pNewEmptyString));
    __ LoadFromOffset(kLoadWord, T9, temp, code_offset.Int32Value());
    __ Jalr(T9);
    __ NopIfNoReordering();
    // Manual call, so the PC info (for stack maps) must be recorded explicitly.
    codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  } else {
    codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
  }
}
8204
8205void LocationsBuilderMIPS::VisitNot(HNot* instruction) {
8206 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
8207 locations->SetInAt(0, Location::RequiresRegister());
8208 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8209}
8210
// Emits bitwise not as NOR with zero (dst = ~(src | 0)); long applies it to
// both halves of the register pair.
void InstructionCodeGeneratorMIPS::VisitNot(HNot* instruction) {
  Primitive::Type type = instruction->GetType();
  LocationSummary* locations = instruction->GetLocations();

  switch (type) {
    case Primitive::kPrimInt: {
      Register dst = locations->Out().AsRegister<Register>();
      Register src = locations->InAt(0).AsRegister<Register>();
      __ Nor(dst, src, ZERO);
      break;
    }

    case Primitive::kPrimLong: {
      Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
      Register dst_low = locations->Out().AsRegisterPairLow<Register>();
      Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
      Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
      __ Nor(dst_high, src_high, ZERO);
      __ Nor(dst_low, src_low, ZERO);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for not operation " << instruction->GetResultType();
  }
}
8237
8238void LocationsBuilderMIPS::VisitBooleanNot(HBooleanNot* instruction) {
8239 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
8240 locations->SetInAt(0, Location::RequiresRegister());
8241 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
8242}
8243
// Emits boolean not as XOR with 1, flipping a 0/1 value.
void InstructionCodeGeneratorMIPS::VisitBooleanNot(HBooleanNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  __ Xori(locations->Out().AsRegister<Register>(),
          locations->InAt(0).AsRegister<Register>(),
          1);
}
8250
// The checked reference must be in a register; slow-path caller saves come from
// the shared throwing-slow-path location helper.
void LocationsBuilderMIPS::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = codegen_->CreateThrowingSlowPathLocations(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
}
8255
// Implicit null check: a load from offset 0 of the object faults on null (the
// signal handler turns the fault into an NPE); PC info maps the faulting
// instruction back to the dex pc.
void CodeGeneratorMIPS::GenerateImplicitNullCheck(HNullCheck* instruction) {
  // Skip emitting anything if a subsequent user's implicit access can serve as the check.
  if (CanMoveNullCheckToUser(instruction)) {
    return;
  }
  Location obj = instruction->GetLocations()->InAt(0);

  __ Lw(ZERO, obj.AsRegister<Register>(), 0);
  RecordPcInfo(instruction, instruction->GetDexPc());
}
8265
// Explicit null check: branch to a throwing slow path when the reference is zero.
void CodeGeneratorMIPS::GenerateExplicitNullCheck(HNullCheck* instruction) {
  SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathMIPS(instruction);
  AddSlowPath(slow_path);

  Location obj = instruction->GetLocations()->InAt(0);

  __ Beqz(obj.AsRegister<Register>(), slow_path->GetEntryLabel());
}
8274
// Delegates to the codegen, which picks the implicit or explicit variant.
void InstructionCodeGeneratorMIPS::VisitNullCheck(HNullCheck* instruction) {
  codegen_->GenerateNullCheck(instruction);
}
8278
// Bitwise or shares the generic binary-op location handling.
void LocationsBuilderMIPS::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}
8282
// Bitwise or shares the generic binary-op code generation.
void InstructionCodeGeneratorMIPS::VisitOr(HOr* instruction) {
  HandleBinaryOp(instruction);
}
8286
// Parallel moves are synthesized after register allocation and never reach the
// locations builder.
void LocationsBuilderMIPS::VisitParallelMove(HParallelMove* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
8290
// The move resolver emits the swap/move sequence for the parallel move.
void InstructionCodeGeneratorMIPS::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
8294
// A parameter lives where the calling convention placed it. Stack-passed
// parameters are addressed in the caller's frame, so their slot index is
// rebased by this method's own frame size.
void LocationsBuilderMIPS::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}
8305
void InstructionCodeGeneratorMIPS::VisitParameterValue(HParameterValue* instruction
                                                       ATTRIBUTE_UNUSED) {
  // Nothing to do, the parameter is already at its location.
}
8310
// The current ArtMethod* is pinned to the method register (A0) on entry.
void LocationsBuilderMIPS::VisitCurrentMethod(HCurrentMethod* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetOut(Location::RegisterLocation(kMethodRegisterArgument));
}
8316
void InstructionCodeGeneratorMIPS::VisitCurrentMethod(HCurrentMethod* instruction
                                                      ATTRIBUTE_UNUSED) {
  // Nothing to do, the method is already at its location.
}
8321
8322void LocationsBuilderMIPS::VisitPhi(HPhi* instruction) {
8323 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
Vladimir Marko372f10e2016-05-17 16:30:10 +01008324 for (size_t i = 0, e = locations->GetInputCount(); i < e; ++i) {
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02008325 locations->SetInAt(i, Location::Any());
8326 }
8327 locations->SetOut(Location::Any());
8328}
8329
// Phis are eliminated before code generation; none should reach here.
void InstructionCodeGeneratorMIPS::VisitPhi(HPhi* instruction ATTRIBUTE_UNUSED) {
  LOG(FATAL) << "Unreachable";
}
8333
// Remainder: int is computed inline (no call); long, float and double go
// through runtime calls, so their operands and result use the runtime calling
// convention.
void LocationsBuilderMIPS::VisitRem(HRem* rem) {
  Primitive::Type type = rem->GetResultType();
  LocationSummary::CallKind call_kind =
      (type == Primitive::kPrimInt) ? LocationSummary::kNoCall : LocationSummary::kCallOnMainOnly;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (type) {
    case Primitive::kPrimInt:
      locations->SetInAt(0, Location::RequiresRegister());
      // A constant divisor enables strength-reduced code in GenerateDivRemIntegral.
      locations->SetInAt(1, Location::RegisterOrConstant(rem->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;

    case Primitive::kPrimLong: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
      locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
      locations->SetOut(calling_convention.GetReturnLocation(type));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
8370
// Emits the remainder: inline div/rem code for int, runtime calls (lmod,
// fmodf, fmod) for long, float and double respectively.
void InstructionCodeGeneratorMIPS::VisitRem(HRem* instruction) {
  Primitive::Type type = instruction->GetType();

  switch (type) {
    case Primitive::kPrimInt:
      GenerateDivRemIntegral(instruction);
      break;
    case Primitive::kPrimLong: {
      codegen_->InvokeRuntime(kQuickLmod, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickLmod, int64_t, int64_t, int64_t>();
      break;
    }
    case Primitive::kPrimFloat: {
      codegen_->InvokeRuntime(kQuickFmodf, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickFmodf, float, float, float>();
      break;
    }
    case Primitive::kPrimDouble: {
      codegen_->InvokeRuntime(kQuickFmod, instruction, instruction->GetDexPc());
      CheckEntrypointTypes<kQuickFmod, double, double, double>();
      break;
    }
    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}
8397
// A constructor fence needs no registers.
void LocationsBuilderMIPS::VisitConstructorFence(HConstructorFence* constructor_fence) {
  constructor_fence->SetLocations(nullptr);
}
8401
// A constructor fence is emitted as a store-store barrier.
void InstructionCodeGeneratorMIPS::VisitConstructorFence(
    HConstructorFence* constructor_fence ATTRIBUTE_UNUSED) {
  GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
}
8406
// A memory barrier needs no registers.
void LocationsBuilderMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  memory_barrier->SetLocations(nullptr);
}
8410
// Emits the barrier with the kind carried by the instruction.
void InstructionCodeGeneratorMIPS::VisitMemoryBarrier(HMemoryBarrier* memory_barrier) {
  GenerateMemoryBarrier(memory_barrier->GetBarrierKind());
}
8414
// The returned value must be in the ABI return location for its type.
void LocationsBuilderMIPS::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  Primitive::Type return_type = ret->InputAt(0)->GetType();
  locations->SetInAt(0, MipsReturnLocation(return_type));
}
8420
// The value is already in the return location; just tear down the frame.
void InstructionCodeGeneratorMIPS::VisitReturn(HReturn* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
8424
// Returning void needs no registers.
void LocationsBuilderMIPS::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
8428
void InstructionCodeGeneratorMIPS::VisitReturnVoid(HReturnVoid* ret ATTRIBUTE_UNUSED) {
  codegen_->GenerateFrameExit();
}
8432
// Rotate shares the generic shift location handling.
void LocationsBuilderMIPS::VisitRor(HRor* ror) {
  HandleShift(ror);
}
8436
// Rotate shares the generic shift code generation.
void InstructionCodeGeneratorMIPS::VisitRor(HRor* ror) {
  HandleShift(ror);
}
8440
// Left shift shares the generic shift location handling.
void LocationsBuilderMIPS::VisitShl(HShl* shl) {
  HandleShift(shl);
}
8444
// Left shift shares the generic shift code generation.
void InstructionCodeGeneratorMIPS::VisitShl(HShl* shl) {
  HandleShift(shl);
}
8448
// Arithmetic right shift shares the generic shift location handling.
void LocationsBuilderMIPS::VisitShr(HShr* shr) {
  HandleShift(shr);
}
8452
// Arithmetic right shift shares the generic shift code generation.
void InstructionCodeGeneratorMIPS::VisitShr(HShr* shr) {
  HandleShift(shr);
}
8456
// Subtraction shares the generic binary-op location handling.
void LocationsBuilderMIPS::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}
8460
// Subtraction shares the generic binary-op code generation.
void InstructionCodeGeneratorMIPS::VisitSub(HSub* instruction) {
  HandleBinaryOp(instruction);
}
8464
// Static field reads share the generic field-get location handling.
void LocationsBuilderMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}
8468
// Static field reads share the generic field-get code generation.
void InstructionCodeGeneratorMIPS::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo(), instruction->GetDexPc());
}
8472
// Static field writes share the generic field-set location handling.
void LocationsBuilderMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}
8476
// Static field writes share the generic field-set code generation; the
// value-can-be-null flag lets the helper skip the write barrier's null check.
void InstructionCodeGeneratorMIPS::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  HandleFieldSet(instruction,
                 instruction->GetFieldInfo(),
                 instruction->GetDexPc(),
                 instruction->GetValueCanBeNull());
}
8483
// Unresolved instance field reads go through the runtime; locations follow the
// field-access calling convention.
void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}
8491
// Emits the runtime access for an unresolved instance field read.
void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldGet(
    HUnresolvedInstanceFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8501
// Unresolved instance field writes go through the runtime; locations follow
// the field-access calling convention.
void LocationsBuilderMIPS::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}
8509
// Emits the runtime access for an unresolved instance field write.
void InstructionCodeGeneratorMIPS::VisitUnresolvedInstanceFieldSet(
    HUnresolvedInstanceFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8519
// Unresolved static field reads go through the runtime; locations follow the
// field-access calling convention.
void LocationsBuilderMIPS::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}
8527
// Emits the runtime access for an unresolved static field read.
void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldGet(
    HUnresolvedStaticFieldGet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8537
// Unresolved static field writes go through the runtime; locations follow the
// field-access calling convention.
void LocationsBuilderMIPS::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->CreateUnresolvedFieldLocationSummary(instruction,
                                                 instruction->GetFieldType(),
                                                 calling_convention);
}
8545
// Emits the runtime access for an unresolved static field write.
void InstructionCodeGeneratorMIPS::VisitUnresolvedStaticFieldSet(
    HUnresolvedStaticFieldSet* instruction) {
  FieldAccessCallingConventionMIPS calling_convention;
  codegen_->GenerateUnresolvedFieldAccess(instruction,
                                          instruction->GetFieldType(),
                                          instruction->GetFieldIndex(),
                                          instruction->GetDexPc(),
                                          calling_convention);
}
8555
// Suspend checks only call the runtime on the slow path.
void LocationsBuilderMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
  // In suspend check slow path, usually there are no caller-save registers at all.
  // If SIMD instructions are present, however, we force spilling all live SIMD
  // registers in full width (since the runtime only saves/restores lower part).
  locations->SetCustomSlowPathCallerSaves(
      GetGraph()->HasSIMD() ? RegisterSet::AllFpu() : RegisterSet::Empty());
}
8565
8566void InstructionCodeGeneratorMIPS::VisitSuspendCheck(HSuspendCheck* instruction) {
8567 HBasicBlock* block = instruction->GetBlock();
8568 if (block->GetLoopInformation() != nullptr) {
8569 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
8570 // The back edge will generate the suspend check.
8571 return;
8572 }
8573 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
8574 // The goto will generate the suspend check.
8575 return;
8576 }
8577 GenerateSuspendCheck(instruction, nullptr);
8578}
8579
// Builds locations for HThrow: the exception object goes in the first runtime
// calling-convention register since throwing is always a main-path runtime call.
void LocationsBuilderMIPS::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}
8586
// Emits the call to the deliver-exception runtime entrypoint.
void InstructionCodeGeneratorMIPS::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(kQuickDeliverException, instruction, instruction->GetDexPc());
  CheckEntrypointTypes<kQuickDeliverException, void, mirror::Object*>();
}
8591
// Builds locations for a primitive type conversion. Most conversions are
// lowered inline; the exception is long<->float/double on pre-R6 CPUs, which
// must go through a runtime entrypoint and therefore uses the runtime calling
// convention for its input and output.
void LocationsBuilderMIPS::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type input_type = conversion->GetInputType();
  Primitive::Type result_type = conversion->GetResultType();
  DCHECK_NE(input_type, result_type);
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  // Reference and void conversions never reach the code generator.
  if ((input_type == Primitive::kPrimNot) || (input_type == Primitive::kPrimVoid) ||
      (result_type == Primitive::kPrimNot) || (result_type == Primitive::kPrimVoid)) {
    LOG(FATAL) << "Unexpected type conversion from " << input_type << " to " << result_type;
  }

  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
  // Pre-R6 lacks usable 64-bit FPU conversion instructions (FR=0 mode), so
  // long<->FP conversions call into the runtime.
  if (!isR6 &&
      ((Primitive::IsFloatingPointType(result_type) && input_type == Primitive::kPrimLong) ||
       (result_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(input_type)))) {
    call_kind = LocationSummary::kCallOnMainOnly;
  }

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);

  if (call_kind == LocationSummary::kNoCall) {
    if (Primitive::IsFloatingPointType(input_type)) {
      locations->SetInAt(0, Location::RequiresFpuRegister());
    } else {
      locations->SetInAt(0, Location::RequiresRegister());
    }

    if (Primitive::IsFloatingPointType(result_type)) {
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
    } else {
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
    }
  } else {
    InvokeRuntimeCallingConvention calling_convention;

    if (Primitive::IsFloatingPointType(input_type)) {
      locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
    } else {
      // Only the long input remains here; it occupies an argument register pair.
      DCHECK_EQ(input_type, Primitive::kPrimLong);
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
    }

    locations->SetOut(calling_convention.GetReturnLocation(result_type));
  }
}
8638
// Emits code for a primitive type conversion. The lowering splits into:
//   - integral -> long: sign-extend into a register pair;
//   - integral -> narrower integral: mask/sign-extend (Seb/Seh on R2+);
//   - integral -> FP: inline cvt on R6, runtime call for long on pre-R6;
//   - FP -> integral: trunc with explicit NaN/range handling on pre-R6;
//   - FP -> FP: cvt.s.d / cvt.d.s.
void InstructionCodeGeneratorMIPS::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  // R2+ provides Seb/Seh for byte/short sign extension.
  bool has_sign_extension = codegen_->GetInstructionSetFeatures().IsMipsIsaRevGreaterThanEqual2();
  bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();

  DCHECK_NE(input_type, result_type);

  if (result_type == Primitive::kPrimLong && Primitive::IsIntegralType(input_type)) {
    // int -> long: low word is the value, high word is its sign.
    Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
    Register dst_low = locations->Out().AsRegisterPairLow<Register>();
    Register src = locations->InAt(0).AsRegister<Register>();

    if (dst_low != src) {
      __ Move(dst_low, src);
    }
    __ Sra(dst_high, src, 31);
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    Register dst = locations->Out().AsRegister<Register>();
    // Narrowing from long only looks at the low word.
    Register src = (input_type == Primitive::kPrimLong)
        ? locations->InAt(0).AsRegisterPairLow<Register>()
        : locations->InAt(0).AsRegister<Register>();

    switch (result_type) {
      case Primitive::kPrimChar:
        // char is unsigned 16-bit: zero-extend.
        __ Andi(dst, src, 0xFFFF);
        break;
      case Primitive::kPrimByte:
        if (has_sign_extension) {
          __ Seb(dst, src);
        } else {
          // R1 fallback: shift left then arithmetic shift right.
          __ Sll(dst, src, 24);
          __ Sra(dst, dst, 24);
        }
        break;
      case Primitive::kPrimShort:
        if (has_sign_extension) {
          __ Seh(dst, src);
        } else {
          __ Sll(dst, src, 16);
          __ Sra(dst, dst, 16);
        }
        break;
      case Primitive::kPrimInt:
        if (dst != src) {
          __ Move(dst, src);
        }
        break;

      default:
        LOG(FATAL) << "Unexpected type conversion from " << input_type
                   << " to " << result_type;
    }
  } else if (Primitive::IsFloatingPointType(result_type) && Primitive::IsIntegralType(input_type)) {
    if (input_type == Primitive::kPrimLong) {
      if (isR6) {
        // cvt.s.l/cvt.d.l requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
        // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
        Register src_high = locations->InAt(0).AsRegisterPairHigh<Register>();
        Register src_low = locations->InAt(0).AsRegisterPairLow<Register>();
        FRegister dst = locations->Out().AsFpuRegister<FRegister>();
        __ Mtc1(src_low, FTMP);
        __ Mthc1(src_high, FTMP);
        if (result_type == Primitive::kPrimFloat) {
          __ Cvtsl(dst, FTMP);
        } else {
          __ Cvtdl(dst, FTMP);
        }
      } else {
        // Pre-R6: delegate long -> float/double to the runtime.
        QuickEntrypointEnum entrypoint = (result_type == Primitive::kPrimFloat) ? kQuickL2f
                                                                                : kQuickL2d;
        codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
        if (result_type == Primitive::kPrimFloat) {
          CheckEntrypointTypes<kQuickL2f, float, int64_t>();
        } else {
          CheckEntrypointTypes<kQuickL2d, double, int64_t>();
        }
      }
    } else {
      // int -> float/double is a single cvt via FTMP on all revisions.
      Register src = locations->InAt(0).AsRegister<Register>();
      FRegister dst = locations->Out().AsFpuRegister<FRegister>();
      __ Mtc1(src, FTMP);
      if (result_type == Primitive::kPrimFloat) {
        __ Cvtsw(dst, FTMP);
      } else {
        __ Cvtdw(dst, FTMP);
      }
    }
  } else if (Primitive::IsIntegralType(result_type) && Primitive::IsFloatingPointType(input_type)) {
    CHECK(result_type == Primitive::kPrimInt || result_type == Primitive::kPrimLong);

    // When NAN2008=1 (R6), the truncate instruction caps the output at the minimum/maximum
    // value of the output type if the input is outside of the range after the truncation or
    // produces 0 when the input is a NaN. IOW, the three special cases produce three distinct
    // results. This matches the desired float/double-to-int/long conversion exactly.
    //
    // When NAN2008=0 (R2 and before), the truncate instruction produces the maximum positive
    // value when the input is either a NaN or is outside of the range of the output type
    // after the truncation. IOW, the three special cases (NaN, too small, too big) produce
    // the same result.
    //
    // The code takes care of the different behaviors by first comparing the input to the
    // minimum output value (-2**-63 for truncating to long, -2**-31 for truncating to int).
    // If the input is greater than or equal to the minimum, it proceeds to the truncate
    // instruction, which will handle such an input the same way irrespective of NAN2008.
    // Otherwise the input is compared to itself to determine whether it is a NaN or not
    // in order to return either zero or the minimum value.
    if (result_type == Primitive::kPrimLong) {
      if (isR6) {
        // trunc.l.s/trunc.l.d requires MIPSR2+ with FR=1. MIPS32R6 is implemented as a secondary
        // architecture on top of MIPS64R6, which has FR=1, and therefore can use the instruction.
        FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
        Register dst_high = locations->Out().AsRegisterPairHigh<Register>();
        Register dst_low = locations->Out().AsRegisterPairLow<Register>();

        if (input_type == Primitive::kPrimFloat) {
          __ TruncLS(FTMP, src);
        } else {
          __ TruncLD(FTMP, src);
        }
        __ Mfc1(dst_low, FTMP);
        __ Mfhc1(dst_high, FTMP);
      } else {
        // Pre-R6: delegate float/double -> long to the runtime.
        QuickEntrypointEnum entrypoint = (input_type == Primitive::kPrimFloat) ? kQuickF2l
                                                                               : kQuickD2l;
        codegen_->InvokeRuntime(entrypoint, conversion, conversion->GetDexPc());
        if (input_type == Primitive::kPrimFloat) {
          CheckEntrypointTypes<kQuickF2l, int64_t, float>();
        } else {
          CheckEntrypointTypes<kQuickD2l, int64_t, double>();
        }
      }
    } else {
      FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
      Register dst = locations->Out().AsRegister<Register>();
      MipsLabel truncate;
      MipsLabel done;

      if (!isR6) {
        // Pre-R6 NaN/out-of-range handling described in the comment above:
        // load INT32_MIN (as float or double) into FTMP for the range check.
        if (input_type == Primitive::kPrimFloat) {
          uint32_t min_val = bit_cast<uint32_t, float>(std::numeric_limits<int32_t>::min());
          __ LoadConst32(TMP, min_val);
          __ Mtc1(TMP, FTMP);
        } else {
          uint64_t min_val = bit_cast<uint64_t, double>(std::numeric_limits<int32_t>::min());
          __ LoadConst32(TMP, High32Bits(min_val));
          __ Mtc1(ZERO, FTMP);
          __ MoveToFpuHigh(TMP, FTMP);
        }

        // If INT32_MIN <= src, the plain truncate handles it correctly.
        if (input_type == Primitive::kPrimFloat) {
          __ ColeS(0, FTMP, src);
        } else {
          __ ColeD(0, FTMP, src);
        }
        __ Bc1t(0, &truncate);

        // src is either NaN or below INT32_MIN: self-compare distinguishes
        // NaN (-> 0) from too-small (-> INT32_MIN).
        if (input_type == Primitive::kPrimFloat) {
          __ CeqS(0, src, src);
        } else {
          __ CeqD(0, src, src);
        }
        __ LoadConst32(dst, std::numeric_limits<int32_t>::min());
        __ Movf(dst, ZERO, 0);

        __ B(&done);

        __ Bind(&truncate);
      }

      if (input_type == Primitive::kPrimFloat) {
        __ TruncWS(FTMP, src);
      } else {
        __ TruncWD(FTMP, src);
      }
      __ Mfc1(dst, FTMP);

      if (!isR6) {
        __ Bind(&done);
      }
    }
  } else if (Primitive::IsFloatingPointType(result_type) &&
             Primitive::IsFloatingPointType(input_type)) {
    FRegister dst = locations->Out().AsFpuRegister<FRegister>();
    FRegister src = locations->InAt(0).AsFpuRegister<FRegister>();
    if (result_type == Primitive::kPrimFloat) {
      __ Cvtsd(dst, src);
    } else {
      __ Cvtds(dst, src);
    }
  } else {
    LOG(FATAL) << "Unexpected or unimplemented type conversion from " << input_type
               << " to " << result_type;
  }
}
8835
// Unsigned shift right and bitwise xor share the generic shift/binary-op
// lowering helpers; these visitors are pure delegations.
void LocationsBuilderMIPS::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorMIPS::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void LocationsBuilderMIPS::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}

void InstructionCodeGeneratorMIPS::VisitXor(HXor* instruction) {
  HandleBinaryOp(instruction);
}
8851
// HBoundType is a type-propagation helper node that must be eliminated before
// register allocation; reaching either visitor indicates a compiler bug.
void LocationsBuilderMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorMIPS::VisitBoundType(HBoundType* instruction ATTRIBUTE_UNUSED) {
  // Nothing to do, this should be removed during prepare for register allocator.
  LOG(FATAL) << "Unreachable";
}
8861
// All comparison conditions (signed, unsigned, and equality) are lowered by a
// single shared helper; each visitor below is a one-line delegation.
void LocationsBuilderMIPS::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitEqual(HEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitNotEqual(HNotEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitLessThan(HLessThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitGreaterThan(HGreaterThan* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitBelow(HBelow* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitBelowOrEqual(HBelowOrEqual* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitAbove(HAbove* comp) {
  HandleCondition(comp);
}

void LocationsBuilderMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}

void InstructionCodeGeneratorMIPS::VisitAboveOrEqual(HAboveOrEqual* comp) {
  HandleCondition(comp);
}
8941
// Builds locations for a packed switch: only the switched-on value is needed,
// in any general-purpose register.
void LocationsBuilderMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
}
8947
// Lowers a packed switch as a chain of compare-and-branch instructions.
// The value is first biased by -lower_bound into TMP so each case can be
// tested by decrementing TMP, pairing a Bltz (value falls between cases) with
// a Beqz (value hits the case) per two successors.
void InstructionCodeGeneratorMIPS::GenPackedSwitchWithCompares(Register value_reg,
                                                               int32_t lower_bound,
                                                               uint32_t num_entries,
                                                               HBasicBlock* switch_block,
                                                               HBasicBlock* default_block) {
  // Create a set of compare/jumps.
  Register temp_reg = TMP;
  __ Addiu32(temp_reg, value_reg, -lower_bound);
  // Jump to default if index is negative
  // Note: We don't check the case that index is positive while value < lower_bound, because in
  // this case, index >= num_entries must be true. So that we can save one branch instruction.
  __ Bltz(temp_reg, codegen_->GetLabelOf(default_block));

  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  // Jump to successors[0] if value == lower_bound.
  __ Beqz(temp_reg, codegen_->GetLabelOf(successors[0]));
  int32_t last_index = 0;
  for (; num_entries - last_index > 2; last_index += 2) {
    __ Addiu(temp_reg, temp_reg, -2);
    // Jump to successors[last_index + 1] if value < case_value[last_index + 2].
    __ Bltz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
    // Jump to successors[last_index + 2] if value == case_value[last_index + 2].
    __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 2]));
  }
  if (num_entries - last_index == 2) {
    // The last missing case_value.
    __ Addiu(temp_reg, temp_reg, -1);
    __ Beqz(temp_reg, codegen_->GetLabelOf(successors[last_index + 1]));
  }

  // And the default for any other value.
  if (!codegen_->GoesToNextBlock(switch_block, default_block)) {
    __ B(codegen_->GetLabelOf(default_block));
  }
}
8983
// Lowers a packed switch through a jump table: range-check the biased value,
// load the table entry (an offset relative to the table start) indexed by it,
// add the table's address, and jump. `constant_area` is the base register for
// addressing the table (ZERO on R6, where PC-relative addressing is used).
void InstructionCodeGeneratorMIPS::GenTableBasedPackedSwitch(Register value_reg,
                                                             Register constant_area,
                                                             int32_t lower_bound,
                                                             uint32_t num_entries,
                                                             HBasicBlock* switch_block,
                                                             HBasicBlock* default_block) {
  // Create a jump table.
  std::vector<MipsLabel*> labels(num_entries);
  const ArenaVector<HBasicBlock*>& successors = switch_block->GetSuccessors();
  for (uint32_t i = 0; i < num_entries; i++) {
    labels[i] = codegen_->GetLabelOf(successors[i]);
  }
  JumpTable* table = __ CreateJumpTable(std::move(labels));

  // Is the value in range?
  __ Addiu32(TMP, value_reg, -lower_bound);
  if (IsInt<16>(static_cast<int32_t>(num_entries))) {
    // Small entry counts fit in Sltiu's immediate; use the cheaper check.
    __ Sltiu(AT, TMP, num_entries);
    __ Beqz(AT, codegen_->GetLabelOf(default_block));
  } else {
    __ LoadConst32(AT, num_entries);
    __ Bgeu(TMP, AT, codegen_->GetLabelOf(default_block));
  }

  // We are in the range of the table.
  // Load the target address from the jump table, indexing by the value.
  __ LoadLabelAddress(AT, constant_area, table->GetLabel());
  __ ShiftAndAdd(TMP, TMP, AT, 2, TMP);
  __ Lw(TMP, TMP, 0);
  // Compute the absolute target address by adding the table start address
  // (the table contains offsets to targets relative to its start).
  __ Addu(TMP, TMP, AT);
  // And jump.
  __ Jr(TMP);
  __ NopIfNoReordering();
}
9020
9021void InstructionCodeGeneratorMIPS::VisitPackedSwitch(HPackedSwitch* switch_instr) {
9022 int32_t lower_bound = switch_instr->GetStartValue();
9023 uint32_t num_entries = switch_instr->GetNumEntries();
9024 LocationSummary* locations = switch_instr->GetLocations();
9025 Register value_reg = locations->InAt(0).AsRegister<Register>();
9026 HBasicBlock* switch_block = switch_instr->GetBlock();
9027 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
9028
9029 if (codegen_->GetInstructionSetFeatures().IsR6() &&
9030 num_entries > kPackedSwitchJumpTableThreshold) {
9031 // R6 uses PC-relative addressing to access the jump table.
9032 // R2, OTOH, requires an HMipsComputeBaseMethodAddress input to access
9033 // the jump table and it is implemented by changing HPackedSwitch to
9034 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress.
9035 // See VisitMipsPackedSwitch() for the table-based implementation on R2.
9036 GenTableBasedPackedSwitch(value_reg,
9037 ZERO,
9038 lower_bound,
9039 num_entries,
9040 switch_block,
9041 default_block);
9042 } else {
9043 GenPackedSwitchWithCompares(value_reg,
9044 lower_bound,
9045 num_entries,
9046 switch_block,
9047 default_block);
9048 }
9049}
9050
// Builds locations for the R2 table-based switch: the switched-on value plus
// the constant-area base produced by HMipsComputeBaseMethodAddress.
void LocationsBuilderMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(switch_instr, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // Constant area pointer (HMipsComputeBaseMethodAddress).
  locations->SetInAt(1, Location::RequiresRegister());
}
9058
9059void InstructionCodeGeneratorMIPS::VisitMipsPackedSwitch(HMipsPackedSwitch* switch_instr) {
9060 int32_t lower_bound = switch_instr->GetStartValue();
9061 uint32_t num_entries = switch_instr->GetNumEntries();
9062 LocationSummary* locations = switch_instr->GetLocations();
9063 Register value_reg = locations->InAt(0).AsRegister<Register>();
9064 Register constant_area = locations->InAt(1).AsRegister<Register>();
9065 HBasicBlock* switch_block = switch_instr->GetBlock();
9066 HBasicBlock* default_block = switch_instr->GetDefaultBlock();
9067
9068 // This is an R2-only path. HPackedSwitch has been changed to
9069 // HMipsPackedSwitch, which bears HMipsComputeBaseMethodAddress
9070 // required to address the jump table relative to PC.
9071 GenTableBasedPackedSwitch(value_reg,
9072 constant_area,
9073 lower_bound,
9074 num_entries,
9075 switch_block,
9076 default_block);
9077}
9078
// Builds locations for the PC-materialization pseudo-instruction: it only
// produces a value, so just an output register is required.
void LocationsBuilderMIPS::VisitMipsComputeBaseMethodAddress(
    HMipsComputeBaseMethodAddress* insn) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(insn, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}
9085
// Materializes the current PC into a register on pre-R6 CPUs (which lack
// PC-relative addressing) via the NAL (branch-and-link-without-branching)
// trick; the value is later used as the base for constant-area accesses.
void InstructionCodeGeneratorMIPS::VisitMipsComputeBaseMethodAddress(
    HMipsComputeBaseMethodAddress* insn) {
  LocationSummary* locations = insn->GetLocations();
  Register reg = locations->Out().AsRegister<Register>();

  // R6 has real PC-relative addressing and never emits this instruction.
  CHECK(!codegen_->GetInstructionSetFeatures().IsR6());

  // Generate a dummy PC-relative call to obtain PC.
  __ Nal();
  // Grab the return address off RA.
  __ Move(reg, RA);

  // Remember this offset (the obtained PC value) for later use with constant area.
  __ BindPcRelBaseLabel();
}
9101
void LocationsBuilderMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  // The trampoline uses the same calling convention as dex calling conventions,
  // except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
  // the method_idx.
  HandleInvoke(invoke);
}
9108
// Emits the runtime-trampoline call that resolves and invokes the method.
void InstructionCodeGeneratorMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) {
  codegen_->GenerateInvokeUnresolvedRuntimeCall(invoke);
}
9112
// Builds locations for a vtable/IMT entry load: class pointer in, method
// pointer out, both in general-purpose registers.
void LocationsBuilderMIPS::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}
9119
// Loads an ArtMethod* out of a class's embedded vtable or its IMT. The vtable
// case is one load from a fixed offset in the Class object; the IMT case first
// loads the table pointer, then indexes into it.
void InstructionCodeGeneratorMIPS::VisitClassTableGet(HClassTableGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  if (instruction->GetTableKind() == HClassTableGet::TableKind::kVTable) {
    // Vtable entries are embedded directly in the Class object.
    uint32_t method_offset = mirror::Class::EmbeddedVTableEntryOffset(
        instruction->GetIndex(), kMipsPointerSize).SizeValue();
    __ LoadFromOffset(kLoadWord,
                      locations->Out().AsRegister<Register>(),
                      locations->InAt(0).AsRegister<Register>(),
                      method_offset);
  } else {
    // IMT: load the table pointer from the Class, then the entry from the table.
    uint32_t method_offset = static_cast<uint32_t>(ImTable::OffsetOfElement(
        instruction->GetIndex(), kMipsPointerSize));
    __ LoadFromOffset(kLoadWord,
                      locations->Out().AsRegister<Register>(),
                      locations->InAt(0).AsRegister<Register>(),
                      mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value());
    __ LoadFromOffset(kLoadWord,
                      locations->Out().AsRegister<Register>(),
                      locations->Out().AsRegister<Register>(),
                      method_offset);
  }
}
9142
Goran Jakovljevicf652cec2015-08-25 16:11:42 +02009143#undef __
9144#undef QUICK_ENTRY_POINT
9145
9146} // namespace mips
9147} // namespace art